##// END OF EJS Templates
scmutil: make revpair() return context objects (API)...
Martin von Zweigbergk -
r37269:e9ee540a default
parent child Browse files
Show More
@@ -1,1434 +1,1435
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 from .utils import (
44 from .utils import (
45 procutil,
45 procutil,
46 stringutil,
46 stringutil,
47 )
47 )
48
48
49 if pycompat.iswindows:
49 if pycompat.iswindows:
50 from . import scmwindows as scmplatform
50 from . import scmwindows as scmplatform
51 else:
51 else:
52 from . import scmposix as scmplatform
52 from . import scmposix as scmplatform
53
53
54 termsize = scmplatform.termsize
54 termsize = scmplatform.termsize
55
55
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Categories are stored positionally; the properties below expose
        # them by name.
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        # %-format against self directly: a 7-tuple matches the 7 %r slots.
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
108
108
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping, preferring subpaths from ctx1.  The
    # subpaths from ctx2 matter when the .hgsub file has been modified (in
    # ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subpaths that exist only in ctx2 are handled separately below.
    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
133
133
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded secret (and not extinct) changesets so the message can
    # mention why nothing was exchanged.
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
150
150
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print a traceback (when enabled) before re-raising into the
            # handlers below.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the one handler with its own exit code.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like object
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe: stay silent, the reader went away
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
267
267
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    # A purely numeric name would be ambiguous with a revision number.
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
284
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt the dirstate/manifest.
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289
289
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    # Only pay for the Windows-portability check when someone will see it.
    msg = util.checkwinfilename(f)
    if msg:
        msg = "%s: %s" % (msg, procutil.shellquote(f))
        if abort:
            raise error.Abort(msg)
        ui.warn(_("warning: %s\n") % msg)
301
301
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    # Windows always aborts on non-portable names; elsewhere only on request.
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
314
314
class casecollisionauditor(object):
    '''Warn or abort when a new file would case-fold-collide with a
    tracked one.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # One join + one lower() over all tracked names is cheaper than
        # lowering each name individually.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
338
338
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # Only revisions at or below maxrev participate, sorted for a stable
    # digest.
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
362
362
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root itself are fatal; deeper ones are skipped.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat; return False if an equivalent directory
            # (same inode) was already seen, guarding against symlink loops.
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat we cannot detect loops, so disable symlinks.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the symlink target ourselves so seen_dirs is
                        # shared and loops are avoided
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
406
406
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; substitute wdirid.
    node = ctx.node()
    return wdirid if node is None else node
413
413
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has rev() None; substitute wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
421
421
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    # Delegate to formatrevnode with the working-directory-safe rev/node.
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
427
427
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hash in debug mode, abbreviated hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
435
435
def revsingle(repo, revspec, default='.', localalias=None):
    # An empty spec falls back to the default revision; note that 0 is a
    # valid (falsy) revision and must not fall back.
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]
444
444
def _pairspec(revspec):
    # True if the spec's top-level operator is a range form (x:y, :y, x:, :),
    # which must always produce a pair even when both ends resolve equal.
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
448
448
def revpairnodes(repo, revs):
    """Like revpair(), but return a pair of binary node ids.

    Convenience wrapper for callers that want nodes now that revpair()
    returns context objects.
    """
    ctx1, ctx2 = revpair(repo, revs)
    return ctx1.node(), ctx2.node()
451
452
def revpair(repo, revs):
    """Resolve user-supplied revision specs into a pair of contexts.

    With no specs, return (working copy parent, working copy).  A single
    non-range spec yields (ctx, workingctx); range-like specs yield the two
    endpoints.  Raises error.Abort for empty ranges.
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # Pick the endpoints without materializing the whole smartset when the
    # ordering is known.
    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
481
482
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Wrap bare integers as rev() expressions; pass everything else through.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
509
510
def meaningfulparents(repo, ctx):
    """Return the list of meaningful (or all if debug) parents for a rev.

    Merges (two non-null parents) always report both parents. A single
    parent is reported only when it is not the immediately preceding
    revision; with --debug the null parent is reported as well.
    """
    parents = ctx.parents()
    if len(parents) <= 1:
        if repo.ui.debugflag:
            # debug output always shows both parent slots
            return [parents[0], repo['null']]
        if parents[0].rev() >= intrev(ctx) - 1:
            # parent is the preceding revision: not meaningful
            return []
    return parents
525
526
def expandpats(pats):
    '''Expand bare glob patterns when running on Windows.

    On POSIX the shell is assumed to have performed the expansion
    already, so the patterns are returned unchanged.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded pattern: never glob-expand
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # no filesystem match: keep the literal pattern
            expanded.append(kindpat)
    return expanded
544
545
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a lone empty pattern (typical of an empty command line) means "no
    # patterns at all"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    # expand bare globs (Windows) unless the caller already did, and only
    # for the default 'relpath' pattern kind
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # default bad-file callback: warn via the repo ui; note this closes
        # over 'm', which is assigned below before the matcher can run
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # if the matcher matches everything, report that no patterns were used
    if m.always():
        pats = []
    return m, pats
569
570
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # delegate to matchandpats() and discard the normalized pattern list
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
574
575
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # anchored at the repo root; getcwd() presumably controls how matched
    # paths are reported relative to the user -- see matchmod.always
    return matchmod.always(repo.root, repo.getcwd())
578
579
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # exact matcher: no pattern syntax, no directory recursion
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
582
583
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError(msg) unless the pattern resolves to exactly one
    file in the context of ``rev``.
    """
    if not matchmod.patkind(pat):
        # plain path (no pattern kind): just canonicalize it
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        # real pattern: match it against the files in the revision and
        # require a unique result
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]
596
597
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the absolute path at which the backup should be written.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        # no configured backup directory: keep backups next to the file
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a file sitting where one of the parent directories must go)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the innermost conflicting entry can exist
                break

        origvfs.makedirs(origbackupdir)

    # a directory occupying the backup file's own path must also go
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
632
633
633 class _containsnode(object):
634 class _containsnode(object):
634 """proxy __contains__(node) to container.__contains__ which accepts revs"""
635 """proxy __contains__(node) to container.__contains__ which accepts revs"""
635
636
636 def __init__(self, repo, revcontainer):
637 def __init__(self, repo, revcontainer):
637 self._torev = repo.changelog.rev
638 self._torev = repo.changelog.rev
638 self._revcontains = revcontainer.__contains__
639 self._revcontains = revcontainer.__contains__
639
640
640 def __contains__(self, node):
641 def __contains__(self, node):
641 return self._revcontains(self._torev(node))
642 return self._revcontains(self._torev(node))
642
643
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms: a bare iterable of nodes becomes a
    # mapping with no successors
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # caller-supplied moves take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards: to the closest surviving ancestor
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence disabled: fall back to stripping the old nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
736
737
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Add new files and forget missing ones, recursing into subrepos.

    Returns 1 if any explicitly-named file was rejected or a subrepo
    reported a failure, 0 otherwise.'''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into subrepos first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher rejects; only explicitly-named ones
    # produce a warning and an error return
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
792
793
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any of the requested files was rejected by the matcher,
    0 otherwise.'''
    # NOTE: the badfn lambda closes over 'rejected', which is assigned on
    # the following line; the callback only runs later, after it exists
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report what will be added (unknown/forgotten) and removed (deleted)
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
821
822
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed,
    forgotten), each containing repo-relative paths.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # st is the walk's stat result: truthy when the file exists on disk
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and a legitimate path
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
850
851
851 def _findrenames(repo, matcher, added, removed, similarity):
852 def _findrenames(repo, matcher, added, removed, similarity):
852 '''Find renames from removed files to added ones.'''
853 '''Find renames from removed files to added ones.'''
853 renames = {}
854 renames = {}
854 if similarity > 0:
855 if similarity > 0:
855 for old, new, score in similar.findrenames(repo, added, removed,
856 for old, new, score in similar.findrenames(repo, added, removed,
856 similarity):
857 similarity):
857 if (repo.ui.verbose or not matcher.exact(old)
858 if (repo.ui.verbose or not matcher.exact(old)
858 or not matcher.exact(new)):
859 or not matcher.exact(new)):
859 repo.ui.status(_('recording removal of %s as rename to %s '
860 repo.ui.status(_('recording removal of %s as rename to %s '
860 '(%d%% similar)\n') %
861 '(%d%% similar)\n') %
861 (matcher.rel(old), matcher.rel(new),
862 (matcher.rel(old), matcher.rel(new),
862 score * 100))
863 score * 100))
863 renames[new] = old
864 renames[new] = old
864 return renames
865 return renames
865
866
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # batch all working-copy mutations under a single wlock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        # renames maps new name -> old name
        for new, old in renames.iteritems():
            wctx.copy(old, new)
875
876
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chains collapse to the true origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only just added: there is no committed data to
            # record the copy against, so warn and merely add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
894
895
def readrequires(opener, supported):
    '''Read and parse .hg/requires, verifying every entry is supported.

    Returns the set of requirements on success. Raises RequirementError
    when the file is corrupt or names features unknown to this Mercurial.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # an empty line or one not starting with an alphanumeric character
        # indicates a corrupt requires file rather than a missing feature
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(requirement)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
913
914
def writerequires(opener, requirements):
    """Write ``requirements`` to the 'requires' file, sorted, one per line."""
    with opener('requires', 'w') as fp:
        for entry in sorted(requirements):
            fp.write("%s\n" % entry)
918
919
class filecachesubentry(object):
    """Track the stat state of one file backing a filecache entry."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once determined, None while unknown
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # file absent: cacheability cannot be determined yet
                self._cacheable = None

    def refresh(self):
        """Re-stat the file when stat results are trustworthy for it."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether stat results can be trusted; optimistic when unknown."""
        if self._cacheable is None:
            # not determined yet, assume it is for now
            return True
        return self._cacheable

    def changed(self):
        """Return True if the file changed since the last recorded stat."""
        # if stat results can't be trusted, always report a change
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # cacheability may be determinable now that the file exists
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # re-check after the possible update above
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        return False

    @staticmethod
    def stat(path):
        """Stat ``path``, returning None when it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
973
974
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        # short-circuits on the first changed entry, like an explicit loop
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
990
991
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths (resolved through join()) of the backing files
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator application: remember the wrapped function and its
        # name (as bytes) and return the descriptor itself
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            # fast path: value already cached on the instance
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # backing file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # (stat=False: don't record stat data for an assigned value)
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # mirror normal attribute semantics: deleting a never-set value
        # raises AttributeError, not KeyError
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1069
1070
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child process and close the stream, even when
        # parsing above raised
        if proc:
            proc.communicate()
        if src:
            src.close()
    # only check the exit status after the child has been reaped
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)[0]))

    return data
1124
1125
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd as a subprocess, advertising the inherited lock via envvar.

    'lock' must currently be held; its inheritance token is exported to the
    child through the 'envvar' environment variable. Returns the exit code
    from repo.ui.system.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    # note: when a dict is passed in, it is mutated in place below
    env = {} if environ is None else environ
    with lock.inherit() as locker:
        env[envvar] = locker
        return repo.ui.system(cmd, environ=env, *args, **kwargs)
1134
1135
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1143
1144
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1150
1151
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1156
1157
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned verbatim under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # preserve the first line as-is, minus its trailing '\n'
            result[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            parsed = {}
            for line in lines:
                # lines holding only whitespace (e.g. a bare '\n') are
                # skipped; 'if line' alone would not catch them
                if not line.strip():
                    continue
                key, value = line[:-1].split('=', 1)
                parsed[key] = value
            if self.firstlinekey in parsed:
                raise error.CorruptedState(
                    _("%r can't be used as a key") % self.firstlinekey)
            result.update(parsed)
        except ValueError as exc:
            # raised by split/unpack when a line has no '=' separator
            raise error.CorruptedState(str(exc))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = []
        if firstline is not None:
            out.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        # atomictemp makes the write all-or-nothing
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1225
1226
# transaction names (matched by prefix in registersummarycallback) after
# which the number of obsoleted changesets is reported to the user
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix) after which the range of new
# changesets is reported to the user
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1246
1247
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # transaction names are matched by prefix, e.g. 'pull' also
        # matches 'pull https://...'
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets were obsoleted by this transaction
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing name, revset name) for each instability kind
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions per kind, ignoring filtered revs
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so the callback can report only the delta
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1330
1331
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of nodes, abbreviating long lists.

    Up to maxnumnodes short hashes are shown; in verbose mode all of
    them are."""
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1336
1337
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1351
1352
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions monkeypatch this
    return sink
1357
1358
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access is opt-in and only meaningful on a filtered repo
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    # only operate on the standard visibility filters
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1400
1401
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    # valid revision numbers are 0..len(unficl)-1; comparing against
    # len(unficl) itself would wrongly accept the out-of-range revnum
    # len(unficl) below and pin a nonexistent revision
    tiprev = len(unficl) - 1
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try to interpret the symbol as a revision number
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    # hidden iff present unfiltered but absent filtered
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # not a known revnum: try the symbol as a hash (prefix)
        try:
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now