##// END OF EJS Templates
maintenance: added update caches to mercurial.
marcink -
r3928:739550ba default
parent child Browse files
Show More
@@ -1,175 +1,184 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
import logging

log = logging.getLogger(__name__)


class MaintenanceTask(object):
    """
    Base class for a single repository maintenance step.

    Subclasses set :attr:`human_name` and implement :meth:`run`, which
    returns a human readable summary of what was executed.
    """
    human_name = 'undefined'

    def __init__(self, db_repo):
        # database repository object; provides scm_instance() to reach the
        # actual VCS backend
        self.db_repo = db_repo

    def run(self):
        """Execute task and return task human value"""
        raise NotImplementedError()

    def _joined_output(self, stdout, stderr):
        """
        Collapse command stdout/stderr into a single report line.

        stderr comes first to keep the same ordering the individual git
        tasks used before this helper was extracted.
        """
        out = ''
        if stderr:
            out += ''.join(stderr.splitlines())
        if stdout:
            out += ''.join(stdout.splitlines())
        return out


class GitGC(MaintenanceTask):
    human_name = 'GIT Garbage collect'

    def _count_objects(self, repo):
        """Return `git count-objects -v` output as one line, stderr appended."""
        stdout, stderr = repo.run_git_command(
            ['count-objects', '-v'], fail_on_stderr=False)

        errors = ' '
        objects = ' '.join(stdout.splitlines())

        if stderr:
            errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
        return objects + errors

    def run(self):
        """Run `git gc --aggressive`, reporting object counts before/after."""
        output = []
        instance = self.db_repo.scm_instance()

        objects_before = self._count_objects(instance)
        log.debug('GIT objects:%s', objects_before)

        cmd = ['gc', '--aggressive']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        output.append('executed {}'.format(' '.join(cmd)))

        out = self._joined_output(stdout, stderr)
        if out:
            output.append(out)

        objects_after = self._count_objects(instance)
        log.debug('GIT objects:%s', objects_after)
        output.append('objects before :' + objects_before)
        output.append('objects after :' + objects_after)

        return '\n'.join(output)


class GitFSCK(MaintenanceTask):
    human_name = 'GIT FSCK'

    def run(self):
        """Run `git fsck --full` and return its combined output."""
        output = []
        instance = self.db_repo.scm_instance()

        cmd = ['fsck', '--full']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        output.append('executed {}'.format(' '.join(cmd)))

        out = self._joined_output(stdout, stderr)
        if out:
            output.append(out)

        return '\n'.join(output)


class GitRepack(MaintenanceTask):
    human_name = 'GIT Repack'

    def run(self):
        """Run `git repack` with bounded memory/pack-size settings."""
        output = []
        instance = self.db_repo.scm_instance()
        cmd = ['repack', '-a', '-d',
               '--window-memory', '10m', '--max-pack-size', '100m']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        output.append('executed {}'.format(' '.join(cmd)))

        out = self._joined_output(stdout, stderr)
        if out:
            output.append(out)

        return '\n'.join(output)


class HGVerify(MaintenanceTask):
    human_name = 'HG Verify repo'

    def run(self):
        """Run repository verification via the scm backend."""
        instance = self.db_repo.scm_instance()
        res = instance.verify()
        return res


class HGUpdateCaches(MaintenanceTask):
    human_name = 'HG update caches'

    def run(self):
        """Refresh mercurial caches via the scm backend."""
        instance = self.db_repo.scm_instance()
        res = instance.hg_update_cache()
        return res


class SVNVerify(MaintenanceTask):
    human_name = 'SVN Verify repo'

    def run(self):
        """Run repository verification via the scm backend."""
        instance = self.db_repo.scm_instance()
        res = instance.verify()
        return res


class RepoMaintenance(object):
    """
    Performs maintenance of repository based on it's type
    """
    # maps db repo_type -> ordered list of task classes to execute
    tasks = {
        'hg': [HGVerify, HGUpdateCaches],
        'git': [GitFSCK, GitGC, GitRepack],
        'svn': [SVNVerify],
    }

    def get_tasks_for_repo(self, db_repo):
        """
        fetches human names of tasks pending for execution for given type of repo
        """
        tasks = []
        for task in self.tasks[db_repo.repo_type]:
            tasks.append(task.human_name)
        return tasks

    def execute(self, db_repo):
        """Run every task for the repo type; return list of task reports."""
        executed_tasks = []
        for task in self.tasks[db_repo.repo_type]:
            output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
            executed_tasks.append(output)
        return executed_tasks
@@ -1,946 +1,952 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param with_wire: optional dict of remote-wire options; when not
           given, remote caching is explicitly disabled
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: commit_id -> index map, filled lazily by commit_ids
        self._commit_ids = {}
88 88
    @LazyProperty
    def _remote(self):
        """Lazily created RPC proxy to the vcsserver for this repository."""
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)

    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids

    def _rebuild_cache(self, commit_ids):
        """Rebuild the commit_id -> index lookup used by get_commit."""
        self._commit_ids = dict((commit_id, index)
                                for index, commit_id in enumerate(commit_ids))
107 107
    @CachedProperty
    def branches(self):
        """Active (not closed) branches: name -> commit id."""
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        """Closed branches only: name -> commit id."""
        return self._get_branches(active=False, closed=True)

    @CachedProperty
    def branches_all(self):
        """Union of active and closed branches."""
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository
        Returns only not closed active branches by default

        :param active: return also active branches
        :param closed: return also closed branches
        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the branch name part of the (name, hash) pair
            return ctx[0]

        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        """Tag name -> commit id, sorted by name descending."""
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the tag name part of the (name, hash) pair
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        # `local` may be passed by callers through kwargs; default non-local
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        # remote state changed: drop vcsserver-side caches
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193 193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # tagging a nullid removes the tag in mercurial
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217 217
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()

    def _get_bookmarks(self):
        """Bookmark name -> commit id, sorted by name."""
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the bookmark name part of the (name, hash) pair
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))

    def _get_all_commit_ids(self):
        """All 'visible' commit ids as reported by the remote."""
        return self._remote.get_all_commit_ids('visible')
240 240
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: optional file path to narrow the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: if given must equal ``path``; diffing two different
          paths is not supported
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
271 271
    def strip(self, commit_id, branch=None):
        """
        Strip given commit (and its descendants) without backup.

        :param branch: accepted for interface compatibility; not used here
        :return: number of commits remaining after the strip
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)
280 280
    def verify(self):
        """Run repository verification on the remote, return its report."""
        verify = self._remote.verify()

        # remote may have touched state: drop vcsserver-side caches
        self._remote.invalidate_vcs_cache()
        return verify

    def hg_update_cache(self):
        """
        Refresh mercurial caches via the remote and return its report.

        NOTE(review): presumably maps to `hg debugupdatecaches` on the
        vcsserver side - confirm against the remote implementation.
        """
        update_cache = self._remote.hg_update_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache
292
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """Return raw id of the common ancestor of the two commits, or None."""
        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)
        return repo2[ancestors[0]].raw_id if ancestors else None

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return commits of ``repo2`` between the two ids.

        With ``merge`` set, selects all ancestors of ``commit_id2`` not
        reachable from ``commit_id1``; otherwise the linear revspec range
        ``id(commit_id1)..id(commit_id2)`` excluding ``commit_id1``.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
313 319
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        """True if ``path`` contains a ``.hg`` directory."""
        return os.path.isdir(os.path.join(path, '.hg'))
335 341
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
364 370
    @LazyProperty
    def in_memory_commit(self):
        """In-memory commit object for building commits programmatically."""
        return MercurialInMemoryCommit(self)

    @LazyProperty
    def description(self):
        """Repository description from hgrc `web.description`, or default."""
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)

    @LazyProperty
    def contact(self):
        """Repository contact: `web.contact`, then `ui.username`, then default."""
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
381 387
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to filesystem modification time
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        """Modification time of the changelog file, or the store directory."""
        # fallback to filesystem
        cl_path = os.path.join(self.path, '.hg', "00changelog.i")
        st_path = os.path.join(self.path, '.hg', "store")
        if os.path.exists(cl_path):
            return os.stat(cl_path).st_mtime
        else:
            return os.stat(st_path).st_mtime
402 408
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # python2 code: bytes/str conversion before the scheme check
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, '.hg', '.hgrc')
419 425
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        With neither given, resolves the repository "tip".
        ``translate_tag`` is accepted for interface compatibility and not
        used here.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # python2 code: coerce unicode ids to str for the remote call
        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
461 467
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
          exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): branch_ancestors is never set to True here, so the
        # ancestors(branch(...)) filter branch below is currently unreachable
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end boundary inclusive for slicing below
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # filters require resolving revisions via a revspec on the remote
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
544 550
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # remote state changed: drop vcsserver-side caches
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        return self.pull(url, commit_ids=commit_ids)

    def push(self, url):
        """Push changes to the given url via the remote's sync_push."""
        url = self._get_url(url)
        self._remote.sync_push(url)
565 571
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks disabled: the clone is an internal (shadow) copy
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)

    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)

    def _identify(self):
        """
        Return the current state of the working directory.
        """
        # trailing '+' presumably marks a dirty working dir in `hg identify`
        # - stripped here; confirm against mercurial docs
        return self._remote.identify().strip().rstrip('+')

    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.
        """
        return self._remote.heads(branch=branch).strip().split(' ')

    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
597 603
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run target repository hooks during push
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
609 615
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        NOTE(review): `dry_run` is accepted but not used in this method.
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # temporary bookmark tracks the rebased source head
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
665 671
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
687 693
    def _is_the_same_branch(self, target_ref, source_ref):
        """True if both refs resolve to the same branch name."""
        return (
            self._get_branch_name(target_ref) ==
            self._get_branch_name(source_ref))

    def _get_branch_name(self, ref):
        """Branch name of a ref; non-branch refs are resolved via the remote."""
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """Create the shadow (workspace) clone if it does not exist yet."""
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
708 714
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository, performing all work inside a shadow repository.

        The flow is: validate the target head, prepare/pull the shadow repo,
        optionally close the source branch, run the local merge (or rebase),
        and finally push the result back into this repository (unless
        ``dry_run``). Returns a MergeResponse describing whether the merge
        was possible and whether it succeeded, plus failure metadata.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # merging is only allowed onto the current head of the target
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # a named branch with multiple heads is ambiguous — refuse to merge
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # NOTE(review): '\n,'.join produces "head1\n,head2" — looks
                # like it was meant to be ',\n'; confirm intended formatting
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        # close the branch first (only for real merges, never for rebase or
        # dry runs); a failed close aborts the merge attempt entirely
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # a fresh shadow instance with hooks enabled, so the push
                    # into the real repository triggers the usual hook chain
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
856 862
857 863 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
858 864 config = self.config.copy()
859 865 if not enable_hooks:
860 866 config.clear_section('hooks')
861 867 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
862 868
863 869 def _validate_pull_reference(self, reference):
864 870 if not (reference.name in self.bookmarks or
865 871 reference.name in self.branches or
866 872 self.get_commit(reference.commit_id)):
867 873 raise CommitDoesNotExistError(
868 874 'Unknown branch, bookmark or commit id')
869 875
870 876 def _local_pull(self, repository_path, reference):
871 877 """
872 878 Fetch a branch, bookmark or commit from a local repository.
873 879 """
874 880 repository_path = os.path.abspath(repository_path)
875 881 if repository_path == self.path:
876 882 raise ValueError('Cannot pull from the same repository')
877 883
878 884 reference_type_to_option_name = {
879 885 'book': 'bookmark',
880 886 'branch': 'branch',
881 887 }
882 888 option_name = reference_type_to_option_name.get(
883 889 reference.type, 'revision')
884 890
885 891 if option_name == 'revision':
886 892 ref = reference.commit_id
887 893 else:
888 894 ref = reference.name
889 895
890 896 options = {option_name: [ref]}
891 897 self._remote.pull_cmd(repository_path, hooks=False, **options)
892 898 self._remote.invalidate_vcs_cache()
893 899
894 900 def bookmark(self, bookmark, revision=None):
895 901 if isinstance(bookmark, unicode):
896 902 bookmark = safe_str(bookmark)
897 903 self._remote.bookmark(bookmark, revision=revision)
898 904 self._remote.invalidate_vcs_cache()
899 905
900 906 def get_path_permissions(self, username):
901 907 hgacl_file = os.path.join(self.path, '.hg/hgacl')
902 908
903 909 def read_patterns(suffix):
904 910 svalue = None
905 911 for section, option in [
906 912 ('narrowacl', username + suffix),
907 913 ('narrowacl', 'default' + suffix),
908 914 ('narrowhgacl', username + suffix),
909 915 ('narrowhgacl', 'default' + suffix)
910 916 ]:
911 917 try:
912 918 svalue = hgacl.get(section, option)
913 919 break # stop at the first value we find
914 920 except configparser.NoOptionError:
915 921 pass
916 922 if not svalue:
917 923 return None
918 924 result = ['/']
919 925 for pattern in svalue.split():
920 926 result.append(pattern)
921 927 if '*' not in pattern and '?' not in pattern:
922 928 result.append(pattern + '/*')
923 929 return result
924 930
925 931 if os.path.exists(hgacl_file):
926 932 try:
927 933 hgacl = configparser.RawConfigParser()
928 934 hgacl.read(hgacl_file)
929 935
930 936 includes = read_patterns('.includes')
931 937 excludes = read_patterns('.excludes')
932 938 return BasePathPermissionChecker.create_from_patterns(
933 939 includes, excludes)
934 940 except BaseException as e:
935 941 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
936 942 hgacl_file, self.name, e)
937 943 raise exceptions.RepositoryRequirementError(msg)
938 944 else:
939 945 return None
940 946
941 947
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection generator that addresses commits by numeric index."""

    def _commit_factory(self, commit_id):
        # despite the name, ``commit_id`` is a numeric index here
        pre_load = self.pre_load
        return self.repo.get_commit(commit_idx=commit_id, pre_load=pre_load)
General Comments 0
You need to be logged in to leave comments. Login now