maintenance: added update caches to mercurial.
marcink - r3928:739550ba default
@@ -1,175 +1,184 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2017-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import logging

log = logging.getLogger(__name__)


class MaintenanceTask(object):
    human_name = 'undefined'

    def __init__(self, db_repo):
        self.db_repo = db_repo

    def run(self):
        """Execute task and return task human value"""
        raise NotImplementedError()


class GitGC(MaintenanceTask):
    human_name = 'GIT Garbage collect'

    def _count_objects(self, repo):
        stdout, stderr = repo.run_git_command(
            ['count-objects', '-v'], fail_on_stderr=False)

        errors = ' '
        objects = ' '.join(stdout.splitlines())

        if stderr:
            errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
        return objects + errors

    def run(self):
        output = []
        instance = self.db_repo.scm_instance()

        objects_before = self._count_objects(instance)

        log.debug('GIT objects:%s', objects_before)
        cmd = ['gc', '--aggressive']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        out = 'executed {}'.format(' '.join(cmd))
        output.append(out)

        out = ''
        if stderr:
            out += ''.join(stderr.splitlines())

        if stdout:
            out += ''.join(stdout.splitlines())

        if out:
            output.append(out)

        objects_after = self._count_objects(instance)
        log.debug('GIT objects:%s', objects_after)
        output.append('objects before :' + objects_before)
        output.append('objects after :' + objects_after)

        return '\n'.join(output)


class GitFSCK(MaintenanceTask):
    human_name = 'GIT FSCK'

    def run(self):
        output = []
        instance = self.db_repo.scm_instance()

        cmd = ['fsck', '--full']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        out = 'executed {}'.format(' '.join(cmd))
        output.append(out)

        out = ''
        if stderr:
            out += ''.join(stderr.splitlines())

        if stdout:
            out += ''.join(stdout.splitlines())

        if out:
            output.append(out)

        return '\n'.join(output)


class GitRepack(MaintenanceTask):
    human_name = 'GIT Repack'

    def run(self):
        output = []
        instance = self.db_repo.scm_instance()
        cmd = ['repack', '-a', '-d',
               '--window-memory', '10m', '--max-pack-size', '100m']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        out = 'executed {}'.format(' '.join(cmd))
        output.append(out)
        out = ''

        if stderr:
            out += ''.join(stderr.splitlines())

        if stdout:
            out += ''.join(stdout.splitlines())

        if out:
            output.append(out)

        return '\n'.join(output)


class HGVerify(MaintenanceTask):
    human_name = 'HG Verify repo'

    def run(self):
        instance = self.db_repo.scm_instance()
        res = instance.verify()
        return res


+class HGUpdateCaches(MaintenanceTask):
+    human_name = 'HG update caches'
+
+    def run(self):
+        instance = self.db_repo.scm_instance()
+        res = instance.hg_update_cache()
+        return res
+
+
class SVNVerify(MaintenanceTask):
    human_name = 'SVN Verify repo'

    def run(self):
        instance = self.db_repo.scm_instance()
        res = instance.verify()
        return res


class RepoMaintenance(object):
    """
    Performs maintenance of repository based on its type
    """
    tasks = {
-        'hg': [HGVerify],
+        'hg': [HGVerify, HGUpdateCaches],
        'git': [GitFSCK, GitGC, GitRepack],
        'svn': [SVNVerify],
    }

    def get_tasks_for_repo(self, db_repo):
        """
        fetches human names of tasks pending for execution for given type of repo
        """
        tasks = []
        for task in self.tasks[db_repo.repo_type]:
            tasks.append(task.human_name)
        return tasks

    def execute(self, db_repo):
        executed_tasks = []
        for task in self.tasks[db_repo.repo_type]:
            output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
            executed_tasks.append(output)
        return executed_tasks
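
With this change an 'hg' repository runs two maintenance tasks instead of one. A minimal usage sketch, not part of the commit; `db_repo` stands for any repository object that exposes `repo_type` and `scm_instance()`, as the task classes above assume:

# Sketch only: assumes `db_repo` is a RhodeCode repository object with
# `repo_type` ('hg', 'git' or 'svn') and a `scm_instance()` method.
maintenance = RepoMaintenance()

# For an 'hg' repository this now reports both tasks:
# ['HG Verify repo', 'HG update caches']
print(maintenance.get_tasks_for_repo(db_repo))

# Runs HGVerify, then HGUpdateCaches, and returns one output block per task.
for task_output in maintenance.execute(db_repo):
    print(task_output)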
@@ -1,946 +1,952 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
HG repository module
"""
import os
import logging
import binascii
import urllib

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.datelib import (
    date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, exceptions
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference, BasePathPermissionChecker)
from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
    TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
from rhodecode.lib.vcs.compat import configparser

hexlify = binascii.hexlify
nullid = "\0" * 20

log = logging.getLogger(__name__)


class MercurialRepository(BaseRepository):
    """
    Mercurial repository backend
    """
    DEFAULT_BRANCH_NAME = 'default'

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository cannot be found at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
            it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}

    @LazyProperty
    def _remote(self):
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)

    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids

    def _rebuild_cache(self, commit_ids):
        self._commit_ids = dict((commit_id, index)
                                for index, commit_id in enumerate(commit_ids))

    @CachedProperty
    def branches(self):
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        return self._get_branches(active=False, closed=True)

    @CachedProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository
        Returns only not closed active branches by default

        :param active: return also active branches
        :param closed: return also closed branches

        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))

    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))

    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')

    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()

    def _get_bookmarks(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))

    def _get_all_commit_ids(self):
        return self._remote.get_all_commit_ids('visible')

    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)

    def strip(self, commit_id, branch=None):
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

+    def hg_update_cache(self):
+        update_cache = self._remote.hg_update_cache()
+
+        self._remote.invalidate_vcs_cache()
+        return update_cache
+
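The new `hg_update_cache()` proxies to `self._remote.hg_update_cache()`, which is served by the remote VCS backend (the vcsserver); that side of the change is not shown in this diff. On a plain Mercurial repository the equivalent operation is `hg debugupdatecaches`; a rough sketch of what it amounts to via the Mercurial API, assuming a Mercurial version that exposes `localrepository.updatecaches()`:

# Illustrative sketch only, not the vcsserver implementation.
from mercurial import hg, ui

def update_caches(repo_path):
    # Open the repository and ask Mercurial to rebuild its caches
    # (branchmap, tags cache, etc.), roughly what `hg debugupdatecaches` does.
    repo = hg.repository(ui.ui(), repo_path)
    # Take the locks the way the debug command does before touching caches.
    with repo.wlock(), repo.lock():
        repo.updatecaches()

Refreshing these caches from the maintenance task means ordinary read requests do not have to pay the cache-rebuild cost after heavy write activity.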
287 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
288 if commit_id1 == commit_id2:
294 if commit_id1 == commit_id2:
289 return commit_id1
295 return commit_id1
290
296
291 ancestors = self._remote.revs_from_revspec(
297 ancestors = self._remote.revs_from_revspec(
292 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
293 other_path=repo2.path)
299 other_path=repo2.path)
294 return repo2[ancestors[0]].raw_id if ancestors else None
300 return repo2[ancestors[0]].raw_id if ancestors else None
295
301
296 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
297 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
298 commits = []
304 commits = []
299 else:
305 else:
300 if merge:
306 if merge:
301 indexes = self._remote.revs_from_revspec(
307 indexes = self._remote.revs_from_revspec(
302 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
303 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
304 else:
310 else:
305 indexes = self._remote.revs_from_revspec(
311 indexes = self._remote.revs_from_revspec(
306 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
307 commit_id1, other_path=repo2.path)
313 commit_id1, other_path=repo2.path)
308
314
309 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
310 for idx in indexes]
316 for idx in indexes]
311
317
312 return commits
318 return commits
313
319
314 @staticmethod
320 @staticmethod
315 def check_url(url, config):
321 def check_url(url, config):
316 """
322 """
317 Function will check given url and try to verify if it's a valid
323 Function will check given url and try to verify if it's a valid
318 link. Sometimes it may happened that mercurial will issue basic
324 link. Sometimes it may happened that mercurial will issue basic
319 auth request that can cause whole API to hang when used from python
325 auth request that can cause whole API to hang when used from python
320 or other external calls.
326 or other external calls.
321
327
322 On failures it'll raise urllib2.HTTPError, exception is also thrown
328 On failures it'll raise urllib2.HTTPError, exception is also thrown
323 when the return code is non 200
329 when the return code is non 200
324 """
330 """
325 # check first if it's not an local url
331 # check first if it's not an local url
326 if os.path.isdir(url) or url.startswith('file:'):
332 if os.path.isdir(url) or url.startswith('file:'):
327 return True
333 return True
328
334
329 # Request the _remote to verify the url
335 # Request the _remote to verify the url
330 return connection.Hg.check_url(url, config.serialize())
336 return connection.Hg.check_url(url, config.serialize())
331
337
332 @staticmethod
338 @staticmethod
333 def is_valid_repository(path):
339 def is_valid_repository(path):
334 return os.path.isdir(os.path.join(path, '.hg'))
340 return os.path.isdir(os.path.join(path, '.hg'))
335
341
336 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
337 """
343 """
338 Function will check for mercurial repository in given path. If there
344 Function will check for mercurial repository in given path. If there
339 is no repository in that path it will raise an exception unless
345 is no repository in that path it will raise an exception unless
340 `create` parameter is set to True - in that case repository would
346 `create` parameter is set to True - in that case repository would
341 be created.
347 be created.
342
348
343 If `src_url` is given, would try to clone repository from the
349 If `src_url` is given, would try to clone repository from the
344 location at given clone_point. Additionally it'll make update to
350 location at given clone_point. Additionally it'll make update to
345 working copy accordingly to `do_workspace_checkout` flag.
351 working copy accordingly to `do_workspace_checkout` flag.
346 """
352 """
347 if create and os.path.exists(self.path):
353 if create and os.path.exists(self.path):
348 raise RepositoryError(
354 raise RepositoryError(
349 "Cannot create repository at %s, location already exist"
355 "Cannot create repository at %s, location already exist"
350 % self.path)
356 % self.path)
351
357
352 if src_url:
358 if src_url:
353 url = str(self._get_url(src_url))
359 url = str(self._get_url(src_url))
354 MercurialRepository.check_url(url, self.config)
360 MercurialRepository.check_url(url, self.config)
355
361
356 self._remote.clone(url, self.path, do_workspace_checkout)
362 self._remote.clone(url, self.path, do_workspace_checkout)
357
363
358 # Don't try to create if we've already cloned repo
364 # Don't try to create if we've already cloned repo
359 create = False
365 create = False
360
366
361 if create:
367 if create:
362 os.makedirs(self.path, mode=0o755)
368 os.makedirs(self.path, mode=0o755)
363 self._remote.localrepository(create)
369 self._remote.localrepository(create)
364
370
365 @LazyProperty
371 @LazyProperty
366 def in_memory_commit(self):
372 def in_memory_commit(self):
367 return MercurialInMemoryCommit(self)
373 return MercurialInMemoryCommit(self)
368
374
369 @LazyProperty
375 @LazyProperty
370 def description(self):
376 def description(self):
371 description = self._remote.get_config_value(
377 description = self._remote.get_config_value(
372 'web', 'description', untrusted=True)
378 'web', 'description', untrusted=True)
373 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
374
380
375 @LazyProperty
381 @LazyProperty
376 def contact(self):
382 def contact(self):
377 contact = (
383 contact = (
378 self._remote.get_config_value("web", "contact") or
384 self._remote.get_config_value("web", "contact") or
379 self._remote.get_config_value("ui", "username"))
385 self._remote.get_config_value("ui", "username"))
380 return safe_unicode(contact or self.DEFAULT_CONTACT)
386 return safe_unicode(contact or self.DEFAULT_CONTACT)
381
387
382 @LazyProperty
388 @LazyProperty
383 def last_change(self):
389 def last_change(self):
384 """
390 """
385 Returns last change made on this repository as
391 Returns last change made on this repository as
386 `datetime.datetime` object.
392 `datetime.datetime` object.
387 """
393 """
388 try:
394 try:
389 return self.get_commit().date
395 return self.get_commit().date
390 except RepositoryError:
396 except RepositoryError:
391 tzoffset = makedate()[1]
397 tzoffset = makedate()[1]
392 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
393
399
394 def _get_fs_mtime(self):
400 def _get_fs_mtime(self):
395 # fallback to filesystem
401 # fallback to filesystem
396 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
397 st_path = os.path.join(self.path, '.hg', "store")
403 st_path = os.path.join(self.path, '.hg', "store")
398 if os.path.exists(cl_path):
404 if os.path.exists(cl_path):
399 return os.stat(cl_path).st_mtime
405 return os.stat(cl_path).st_mtime
400 else:
406 else:
401 return os.stat(st_path).st_mtime
407 return os.stat(st_path).st_mtime
402
408
403 def _get_url(self, url):
409 def _get_url(self, url):
404 """
410 """
405 Returns normalized url. If schema is not given, would fall
411 Returns normalized url. If schema is not given, would fall
406 to filesystem
412 to filesystem
407 (``file:///``) schema.
413 (``file:///``) schema.
408 """
414 """
409 url = url.encode('utf8')
415 url = url.encode('utf8')
410 if url != 'default' and '://' not in url:
416 if url != 'default' and '://' not in url:
411 url = "file:" + urllib.pathname2url(url)
417 url = "file:" + urllib.pathname2url(url)
412 return url
418 return url
413
419
414 def get_hook_location(self):
420 def get_hook_location(self):
415 """
421 """
416 returns absolute path to location where hooks are stored
422 returns absolute path to location where hooks are stored
417 """
423 """
418 return os.path.join(self.path, '.hg', '.hgrc')
424 return os.path.join(self.path, '.hg', '.hgrc')
419
425
420 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
421 """
427 """
422 Returns ``MercurialCommit`` object representing repository's
428 Returns ``MercurialCommit`` object representing repository's
423 commit at the given `commit_id` or `commit_idx`.
429 commit at the given `commit_id` or `commit_idx`.
424 """
430 """
425 if self.is_empty():
431 if self.is_empty():
426 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
427
433
428 if commit_id is not None:
434 if commit_id is not None:
429 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
430 try:
436 try:
431 # we have cached idx, use it without contacting the remote
437 # we have cached idx, use it without contacting the remote
432 idx = self._commit_ids[commit_id]
438 idx = self._commit_ids[commit_id]
433 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
434 except KeyError:
440 except KeyError:
435 pass
441 pass
436
442
437 elif commit_idx is not None:
443 elif commit_idx is not None:
438 self._validate_commit_idx(commit_idx)
444 self._validate_commit_idx(commit_idx)
439 try:
445 try:
440 _commit_id = self.commit_ids[commit_idx]
446 _commit_id = self.commit_ids[commit_idx]
441 if commit_idx < 0:
447 if commit_idx < 0:
442 commit_idx = self.commit_ids.index(_commit_id)
448 commit_idx = self.commit_ids.index(_commit_id)
443
449
444 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
445 except IndexError:
451 except IndexError:
446 commit_id = commit_idx
452 commit_id = commit_idx
447 else:
453 else:
448 commit_id = "tip"
454 commit_id = "tip"
449
455
450 if isinstance(commit_id, unicode):
456 if isinstance(commit_id, unicode):
451 commit_id = safe_str(commit_id)
457 commit_id = safe_str(commit_id)
452
458
453 try:
459 try:
454 raw_id, idx = self._remote.lookup(commit_id, both=True)
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
455 except CommitDoesNotExistError:
461 except CommitDoesNotExistError:
456 msg = "Commit {} does not exist for `{}`".format(
462 msg = "Commit {} does not exist for `{}`".format(
457 *map(safe_str, [commit_id, self.name]))
463 *map(safe_str, [commit_id, self.name]))
458 raise CommitDoesNotExistError(msg)
464 raise CommitDoesNotExistError(msg)
459
465
460 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
461
467
462 def get_commits(
468 def get_commits(
463 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
464 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
465 """
471 """
466 Returns generator of ``MercurialCommit`` objects from start to end
472 Returns generator of ``MercurialCommit`` objects from start to end
467 (both are inclusive)
473 (both are inclusive)
468
474
469 :param start_id: None, str(commit_id)
475 :param start_id: None, str(commit_id)
470 :param end_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
471 :param start_date: if specified, commits with commit date less than
477 :param start_date: if specified, commits with commit date less than
472 ``start_date`` would be filtered out from returned set
478 ``start_date`` would be filtered out from returned set
473 :param end_date: if specified, commits with commit date greater than
479 :param end_date: if specified, commits with commit date greater than
474 ``end_date`` would be filtered out from returned set
480 ``end_date`` would be filtered out from returned set
475 :param branch_name: if specified, commits not reachable from given
481 :param branch_name: if specified, commits not reachable from given
476 branch would be filtered out from returned set
482 branch would be filtered out from returned set
477 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
478 Mercurial evolve
484 Mercurial evolve
479 :raise BranchDoesNotExistError: If given ``branch_name`` does not
485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
480 exist.
486 exist.
481 :raise CommitDoesNotExistError: If commit for given ``start`` or
487 :raise CommitDoesNotExistError: If commit for given ``start`` or
482 ``end`` could not be found.
488 ``end`` could not be found.
483 """
489 """
484 # actually we should check now if it's not an empty repo
490 # actually we should check now if it's not an empty repo
485 if self.is_empty():
491 if self.is_empty():
486 raise EmptyRepositoryError("There are no commits yet")
492 raise EmptyRepositoryError("There are no commits yet")
487 self._validate_branch_name(branch_name)
493 self._validate_branch_name(branch_name)
488
494
489 branch_ancestors = False
495 branch_ancestors = False
490 if start_id is not None:
496 if start_id is not None:
491 self._validate_commit_id(start_id)
497 self._validate_commit_id(start_id)
492 c_start = self.get_commit(commit_id=start_id)
498 c_start = self.get_commit(commit_id=start_id)
493 start_pos = self._commit_ids[c_start.raw_id]
499 start_pos = self._commit_ids[c_start.raw_id]
494 else:
500 else:
495 start_pos = None
501 start_pos = None
496
502
497 if end_id is not None:
503 if end_id is not None:
498 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
499 c_end = self.get_commit(commit_id=end_id)
505 c_end = self.get_commit(commit_id=end_id)
500 end_pos = max(0, self._commit_ids[c_end.raw_id])
506 end_pos = max(0, self._commit_ids[c_end.raw_id])
501 else:
507 else:
502 end_pos = None
508 end_pos = None
503
509
504 if None not in [start_id, end_id] and start_pos > end_pos:
510 if None not in [start_id, end_id] and start_pos > end_pos:
505 raise RepositoryError(
511 raise RepositoryError(
506 "Start commit '%s' cannot be after end commit '%s'" %
512 "Start commit '%s' cannot be after end commit '%s'" %
507 (start_id, end_id))
513 (start_id, end_id))
508
514
509 if end_pos is not None:
515 if end_pos is not None:
510 end_pos += 1
516 end_pos += 1
511
517
512 commit_filter = []
518 commit_filter = []
513
519
514 if branch_name and not branch_ancestors:
520 if branch_name and not branch_ancestors:
515 commit_filter.append('branch("%s")' % (branch_name,))
521 commit_filter.append('branch("%s")' % (branch_name,))
516 elif branch_name and branch_ancestors:
522 elif branch_name and branch_ancestors:
517 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
518
524
519 if start_date and not end_date:
525 if start_date and not end_date:
520 commit_filter.append('date(">%s")' % (start_date,))
526 commit_filter.append('date(">%s")' % (start_date,))
521 if end_date and not start_date:
527 if end_date and not start_date:
522 commit_filter.append('date("<%s")' % (end_date,))
528 commit_filter.append('date("<%s")' % (end_date,))
523 if start_date and end_date:
529 if start_date and end_date:
524 commit_filter.append(
530 commit_filter.append(
525 'date(">%s") and date("<%s")' % (start_date, end_date))
531 'date(">%s") and date("<%s")' % (start_date, end_date))
526
532
527 if not show_hidden:
533 if not show_hidden:
528 commit_filter.append('not obsolete()')
534 commit_filter.append('not obsolete()')
529 commit_filter.append('not hidden()')
535 commit_filter.append('not hidden()')
530
536
531 # TODO: johbo: Figure out a simpler way for this solution
537 # TODO: johbo: Figure out a simpler way for this solution
532 collection_generator = CollectionGenerator
538 collection_generator = CollectionGenerator
533 if commit_filter:
539 if commit_filter:
534 commit_filter = ' and '.join(map(safe_str, commit_filter))
540 commit_filter = ' and '.join(map(safe_str, commit_filter))
535 revisions = self._remote.rev_range([commit_filter])
541 revisions = self._remote.rev_range([commit_filter])
536 collection_generator = MercurialIndexBasedCollectionGenerator
542 collection_generator = MercurialIndexBasedCollectionGenerator
537 else:
543 else:
538 revisions = self.commit_ids
544 revisions = self.commit_ids
539
545
540 if start_pos or end_pos:
546 if start_pos or end_pos:
541 revisions = revisions[start_pos:end_pos]
547 revisions = revisions[start_pos:end_pos]
542
548
543 return collection_generator(self, revisions, pre_load=pre_load)
549 return collection_generator(self, revisions, pre_load=pre_load)
544
550
545 def pull(self, url, commit_ids=None):
551 def pull(self, url, commit_ids=None):
546 """
552 """
547 Pull changes from external location.
553 Pull changes from external location.
548
554
549 :param commit_ids: Optional. Can be set to a list of commit ids
555 :param commit_ids: Optional. Can be set to a list of commit ids
550 which shall be pulled from the other repository.
556 which shall be pulled from the other repository.
551 """
557 """
552 url = self._get_url(url)
558 url = self._get_url(url)
553 self._remote.pull(url, commit_ids=commit_ids)
559 self._remote.pull(url, commit_ids=commit_ids)
554 self._remote.invalidate_vcs_cache()
560 self._remote.invalidate_vcs_cache()
555
561
556 def fetch(self, url, commit_ids=None):
562 def fetch(self, url, commit_ids=None):
557 """
563 """
558 Backward compatibility with GIT fetch==pull
564 Backward compatibility with GIT fetch==pull
559 """
565 """
560 return self.pull(url, commit_ids=commit_ids)
566 return self.pull(url, commit_ids=commit_ids)
561
567
562 def push(self, url):
568 def push(self, url):
563 url = self._get_url(url)
569 url = self._get_url(url)
564 self._remote.sync_push(url)
570 self._remote.sync_push(url)
565
571
566 def _local_clone(self, clone_path):
572 def _local_clone(self, clone_path):
567 """
573 """
568 Create a local clone of the current repo.
574 Create a local clone of the current repo.
569 """
575 """
570 self._remote.clone(self.path, clone_path, update_after_clone=True,
576 self._remote.clone(self.path, clone_path, update_after_clone=True,
571 hooks=False)
577 hooks=False)
572
578
573 def _update(self, revision, clean=False):
579 def _update(self, revision, clean=False):
574 """
580 """
575 Update the working copy to the specified revision.
581 Update the working copy to the specified revision.
576 """
582 """
577 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
583 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
578 self._remote.update(revision, clean=clean)
584 self._remote.update(revision, clean=clean)
579
585
580 def _identify(self):
586 def _identify(self):
581 """
587 """
582 Return the current state of the working directory.
588 Return the current state of the working directory.
583 """
589 """
584 return self._remote.identify().strip().rstrip('+')
590 return self._remote.identify().strip().rstrip('+')
585
591
586 def _heads(self, branch=None):
592 def _heads(self, branch=None):
587 """
593 """
588 Return the commit ids of the repository heads.
594 Return the commit ids of the repository heads.
589 """
595 """
590 return self._remote.heads(branch=branch).strip().split(' ')
596 return self._remote.heads(branch=branch).strip().split(' ')
591
597
592 def _ancestor(self, revision1, revision2):
598 def _ancestor(self, revision1, revision2):
593 """
599 """
594 Return the common ancestor of the two revisions.
600 Return the common ancestor of the two revisions.
595 """
601 """
596 return self._remote.ancestor(revision1, revision2)
602 return self._remote.ancestor(revision1, revision2)
597
603
598 def _local_push(
604 def _local_push(
599 self, revision, repository_path, push_branches=False,
605 self, revision, repository_path, push_branches=False,
600 enable_hooks=False):
606 enable_hooks=False):
601 """
607 """
602 Push the given revision to the specified repository.
608 Push the given revision to the specified repository.
603
609
604 :param push_branches: allow to create branches in the target repo.
610 :param push_branches: allow to create branches in the target repo.
605 """
611 """
606 self._remote.push(
612 self._remote.push(
607 [revision], repository_path, hooks=enable_hooks,
613 [revision], repository_path, hooks=enable_hooks,
608 push_branches=push_branches)
614 push_branches=push_branches)
609
615
610 def _local_merge(self, target_ref, merge_message, user_name, user_email,
616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
611 source_ref, use_rebase=False, dry_run=False):
617 source_ref, use_rebase=False, dry_run=False):
612 """
618 """
613 Merge the given source_revision into the checked out revision.
619 Merge the given source_revision into the checked out revision.
614
620
615 Returns the commit id of the merge and a boolean indicating if the
621 Returns the commit id of the merge and a boolean indicating if the
616 commit needs to be pushed.
622 commit needs to be pushed.
617 """
623 """
618 self._update(target_ref.commit_id, clean=True)
624 self._update(target_ref.commit_id, clean=True)
619
625
620 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
621 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
622
628
623 if ancestor == source_ref.commit_id:
629 if ancestor == source_ref.commit_id:
624 # Nothing to do, the changes were already integrated
630 # Nothing to do, the changes were already integrated
625 return target_ref.commit_id, False
631 return target_ref.commit_id, False
626
632
627 elif ancestor == target_ref.commit_id and is_the_same_branch:
633 elif ancestor == target_ref.commit_id and is_the_same_branch:
628 # In this case we should force a commit message
634 # In this case we should force a commit message
629 return source_ref.commit_id, True
635 return source_ref.commit_id, True
630
636
631 if use_rebase:
637 if use_rebase:
632 try:
638 try:
633 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
639 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
634 target_ref.commit_id)
640 target_ref.commit_id)
635 self.bookmark(bookmark_name, revision=source_ref.commit_id)
641 self.bookmark(bookmark_name, revision=source_ref.commit_id)
636 self._remote.rebase(
642 self._remote.rebase(
637 source=source_ref.commit_id, dest=target_ref.commit_id)
643 source=source_ref.commit_id, dest=target_ref.commit_id)
638 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
639 self._update(bookmark_name, clean=True)
645 self._update(bookmark_name, clean=True)
640 return self._identify(), True
646 return self._identify(), True
641 except RepositoryError:
647 except RepositoryError:
642 # The rebase-abort may raise another exception which 'hides'
648 # The rebase-abort may raise another exception which 'hides'
643 # the original one, therefore we log it here.
649 # the original one, therefore we log it here.
644 log.exception('Error while rebasing shadow repo during merge.')
650 log.exception('Error while rebasing shadow repo during merge.')
645
651
646 # Cleanup any rebase leftovers
652 # Cleanup any rebase leftovers
647 self._remote.invalidate_vcs_cache()
653 self._remote.invalidate_vcs_cache()
648 self._remote.rebase(abort=True)
654 self._remote.rebase(abort=True)
649 self._remote.invalidate_vcs_cache()
655 self._remote.invalidate_vcs_cache()
650 self._remote.update(clean=True)
656 self._remote.update(clean=True)
651 raise
657 raise
652 else:
658 else:
653 try:
659 try:
654 self._remote.merge(source_ref.commit_id)
660 self._remote.merge(source_ref.commit_id)
655 self._remote.invalidate_vcs_cache()
661 self._remote.invalidate_vcs_cache()
656 self._remote.commit(
662 self._remote.commit(
657 message=safe_str(merge_message),
663 message=safe_str(merge_message),
658 username=safe_str('%s <%s>' % (user_name, user_email)))
664 username=safe_str('%s <%s>' % (user_name, user_email)))
659 self._remote.invalidate_vcs_cache()
665 self._remote.invalidate_vcs_cache()
660 return self._identify(), True
666 return self._identify(), True
661 except RepositoryError:
667 except RepositoryError:
662 # Cleanup any merge leftovers
668 # Cleanup any merge leftovers
663 self._remote.update(clean=True)
669 self._remote.update(clean=True)
664 raise
670 raise
665
671
666 def _local_close(self, target_ref, user_name, user_email,
672 def _local_close(self, target_ref, user_name, user_email,
667 source_ref, close_message=''):
673 source_ref, close_message=''):
668 """
674 """
669 Close the branch of the given source_revision
675 Close the branch of the given source_revision
670
676
671 Returns the commit id of the close and a boolean indicating if the
677 Returns the commit id of the close and a boolean indicating if the
672 commit needs to be pushed.
678 commit needs to be pushed.
673 """
679 """
674 self._update(source_ref.commit_id)
680 self._update(source_ref.commit_id)
675 message = close_message or "Closing branch: `{}`".format(source_ref.name)
681 message = close_message or "Closing branch: `{}`".format(source_ref.name)
676 try:
682 try:
677 self._remote.commit(
683 self._remote.commit(
678 message=safe_str(message),
684 message=safe_str(message),
679 username=safe_str('%s <%s>' % (user_name, user_email)),
685 username=safe_str('%s <%s>' % (user_name, user_email)),
680 close_branch=True)
686 close_branch=True)
681 self._remote.invalidate_vcs_cache()
687 self._remote.invalidate_vcs_cache()
682 return self._identify(), True
688 return self._identify(), True
683 except RepositoryError:
689 except RepositoryError:
684 # Cleanup any commit leftovers
690 # Cleanup any commit leftovers
685 self._remote.update(clean=True)
691 self._remote.update(clean=True)
686 raise
692 raise
687
693
688 def _is_the_same_branch(self, target_ref, source_ref):
694 def _is_the_same_branch(self, target_ref, source_ref):
689 return (
695 return (
690 self._get_branch_name(target_ref) ==
696 self._get_branch_name(target_ref) ==
691 self._get_branch_name(source_ref))
697 self._get_branch_name(source_ref))
692
698
693 def _get_branch_name(self, ref):
699 def _get_branch_name(self, ref):
694 if ref.type == 'branch':
700 if ref.type == 'branch':
695 return ref.name
701 return ref.name
696 return self._remote.ctx_branch(ref.commit_id)
702 return self._remote.ctx_branch(ref.commit_id)
697
703
698 def _maybe_prepare_merge_workspace(
704 def _maybe_prepare_merge_workspace(
699 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
705 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
700 shadow_repository_path = self._get_shadow_repository_path(
706 shadow_repository_path = self._get_shadow_repository_path(
701 repo_id, workspace_id)
707 repo_id, workspace_id)
702 if not os.path.exists(shadow_repository_path):
708 if not os.path.exists(shadow_repository_path):
703 self._local_clone(shadow_repository_path)
709 self._local_clone(shadow_repository_path)
704 log.debug(
710 log.debug(
705 'Prepared shadow repository in %s', shadow_repository_path)
711 'Prepared shadow repository in %s', shadow_repository_path)
706
712
707 return shadow_repository_path
713 return shadow_repository_path
708
714
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that the close-branch option is used only when the source is
        # an actual branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow closing the branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # the close action above may also have produced a commit
                # that requires a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmark is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action: we push from the shadow
                    # repository into the origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)

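    # A minimal usage sketch of the merge flow above (all names, ids and the
    # `target_repo`/`source_repo` objects are hypothetical placeholders, not part
    # of this changeset). A dry run exercises the pull/merge steps in the shadow
    # repository without pushing back to the target:
    #
    #   target = Reference('branch', 'default', '<target-commit-id>')
    #   source = Reference('branch', 'feature-x', '<source-commit-id>')
    #   response = target_repo._merge_repo(
    #       repo_id, workspace_id, target, source_repo, source,
    #       merge_message='Merge feature-x into default',
    #       merger_name='Jane Doe', merger_email='jane@example.com',
    #       dry_run=True)
    #   # `response` is the MergeResponse(merge_possible, merge_succeeded,
    #   # merge_ref, merge_failure_reason, metadata=...) built above.
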
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})

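    # Note: in the merge flow above the shadow repository is first instantiated
    # with hooks disabled (and caching off by default); only the final push
    # re-instantiates it with enable_hooks=True so that hooks run for the push
    # into the target repository.
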
    def _validate_pull_reference(self, reference):
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()

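    # For orientation, the option mapping above matches the pull selectors of the
    # plain `hg pull` command (paths and names below are illustrative only):
    #
    #   hg pull ../other-repo -B some-bookmark   # reference.type == 'book'
    #   hg pull ../other-repo -b some-branch     # reference.type == 'branch'
    #   hg pull ../other-repo -r <commit-id>     # anything else, by revision
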
    def bookmark(self, bookmark, revision=None):
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

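    # Roughly the remote-call counterpart of `hg bookmark <name>` (optionally
    # with `-r <revision>`), followed by a VCS cache invalidation.
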
    def get_path_permissions(self, username):
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

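        # The ACL file is a plain INI file read with configparser. A hypothetical
        # example matching the sections and option suffixes looked up below
        # (values are whitespace-separated path patterns; `*`/`?` wildcards are
        # allowed):
        #
        #   [narrowacl]
        #   default.includes = docs setup.py
        #   john.includes = docs src
        #   john.excludes = src/secret/*
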
        def read_patterns(suffix):
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
                    ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)