##// END OF EJS Templates
scmutil: deprecate revpairnodes()...
Martin von Zweigbergk -
r37276:d29f6fbd default
parent child Browse files
Show More
@@ -1,1435 +1,1436 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 from .utils import (
44 from .utils import (
45 procutil,
45 procutil,
46 stringutil,
46 stringutil,
47 )
47 )
48
48
49 if pycompat.iswindows:
49 if pycompat.iswindows:
50 from . import scmwindows as scmplatform
50 from . import scmwindows as scmplatform
51 else:
51 else:
52 from . import scmposix as scmplatform
52 from . import scmposix as scmplatform
53
53
54 termsize = scmplatform.termsize
54 termsize = scmplatform.termsize
55
55
class status(tuple):
    '''Immutable 7-tuple of file lists, one list per dirstate status.

    The 'deleted', 'unknown' and 'ignored' properties only make sense
    for the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files changed relative to the parent'''
        return self[0]

    @property
    def added(self):
        '''files newly scheduled for tracking'''
        return self[1]

    @property
    def removed(self):
        '''files scheduled for removal'''
        return self[2]

    @property
    def deleted(self):
        '''files still in the dirstate but gone from the working copy
        (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files present on disk that are neither tracked nor ignored'''
        return self[4]

    @property
    def ignored(self):
        '''untracked files matched by the ignore rules (_dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''tracked files with no modifications'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
108
108
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # items() behaves identically to the former iteritems() under sorted()
    # on Python 2 and also works on Python 3
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
133
133
def nochangesfound(ui, repo, excluded=None):
    '''Report that a push/pull found nothing to transfer.

    excluded is None or a list of nodes excluded from the push/pull.
    '''
    # count excluded changesets that are secret (and not obsolete) so the
    # user learns why "nothing" was found
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
150
150
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # log the traceback (when ui is configured to) before the
            # outer handlers turn the exception into a message + exit code
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            # no locker info usually means the lock file is unreadable/busy
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # normalize a unicode payload to bytes before deciding how to print
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the one handled case that exits with 1,
        # not -1: the user must act, but it is not a hard failure
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # the last word of the message is the missing module name
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # presumably HTTPError-like objects carry a 'code' attribute;
        # urllib URLError-like ones carry 'reason' — TODO confirm callers
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a closed pager) is not
            # worth a message
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # unrecognized IOError shape: let it propagate
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    # every handled-but-fatal case above falls through to exit code -1
    return -1
267
267
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not usable as a new label (bookmark/branch/tag) name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in ':\0\n\r':
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # a purely numeric name would be ambiguous with revision numbers
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
284
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newline characters would corrupt the manifest's line-based format
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
289
289
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    # checkwinfilename returns None for portable names, else a reason string
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
301
301
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.
    '''
    val = ui.config('ui', 'portablefilenames')
    lowered = val.lower()
    parsed = stringutil.parsebool(val)
    # on Windows non-portable names always abort, regardless of config
    abort = pycompat.iswindows or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    if parsed is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
314
314
class casecollisionauditor(object):
    '''Detect additions that collide case-insensitively with tracked files.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        self._dirstate = dirstate
        # lowercase every tracked name in a single encoding.lower() pass,
        # joining on NUL (which cannot appear in a filename)
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
338
338
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None when the view filters nothing (or nothing <= maxrev).
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # feed bytes: identical to the old native-str literal on
            # Python 2, and required by hashlib on Python 3
            s.update(b'%d;' % rev)
        key = s.digest()
    return key
362
362
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path itself are fatal
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # remember dirname's stat; return False if an equivalent
            # directory (same inode) was already recorded
            dirstat = os.stat(dirname)
            for seenstat in dirlst:
                if samestat(dirstat, seenstat):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect symlink cycles safely
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            kept = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the symlink ourselves, sharing the
                        # seen set to avoid revisiting targets
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        kept.append(d)
            dirs[:] = kept
406
406
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # the working directory has no real node; stand in the magic wdir id
    n = ctx.node()
    return wdirid if n is None else n
413
413
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # the working directory has rev None; map it to the magic wdir revnum
    r = ctx.rev()
    return wdirrev if r is None else r
421
421
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    # delegate to formatrevnode using the ctx's own repo ui for verbosity
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
427
427
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # debug output shows the full 40-char hash, normal output the short form
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
435
435
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single revspec to a context, falling back to default.'''
    # an empty spec (but not the integer 0, a valid revnum) uses the default
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
444
444
def _pairspec(revspec):
    # True when the spec's top-level operator is a range (so it already
    # denotes two endpoints)
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
448
448
def revpairnodes(repo, revs):
    '''Deprecated: like revpair() but returning nodes instead of contexts.'''
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    c1, c2 = revpair(repo, revs)
    return c1.node(), c2.node()
452
453
def revpair(repo, revs):
    '''Resolve a list of revspecs to a (ctx1, ctx2) pair.'''
    # no specs at all means "working copy against its first parent"
    if not revs:
        return repo['.'], repo[None]

    rset = revrange(repo, revs)

    first = second = None
    if rset:
        if rset.isascending():
            first, second = rset.min(), rset.max()
        elif rset.isdescending():
            first, second = rset.max(), rset.min()
        else:
            first, second = rset.first(), rset.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
482
483
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare revnums are wrapped in rev(); everything else passes through
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
510
511
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) <= 1:
        # In debug mode always show the second (null) parent explicitly.
        if repo.ui.debugflag:
            return [parents[0], repo['null']]
        # A sole parent that immediately precedes this revision carries no
        # information, so report nothing.
        if parents[0].rev() >= intrev(ctx) - 1:
            return []
    # A merge, or a non-adjacent single parent: report as-is.
    return parents
526
527
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    # (Docstring fix: original read "it already has already been done".)
    if not util.expandglobs:
        # Shell-style glob expansion is not wanted on this platform.
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        # Only bare patterns (no 'glob:', 're:', ... prefix) are expanded.
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # Invalid glob pattern: fall back to the literal name.
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # Prefixed pattern, or a glob that matched nothing: keep verbatim.
        ret.append(kindpat)
    return ret
545
546
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # A single empty pattern means "no patterns".
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    # On platforms where the shell does not expand globs (see expandpats),
    # do it here, but only for plain relative-path patterns.
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over `m`, which is only bound further down; this
        # relies on late binding and is safe because the matcher invokes
        # badfn only after it has been fully constructed.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # An always-matching matcher means the patterns were effectively empty.
    if m.always():
        pats = []
    return m, pats
570
571
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # Delegate to matchandpats() and discard the expanded pattern list.
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
575
576
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # An "always" matcher skips per-file pattern evaluation entirely.
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
579
580
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # An exact matcher tests membership in `files` rather than patterns.
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
583
584
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    # A plain path (no 'glob:', 're:', ... prefix) is simply canonicalized.
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # Otherwise the pattern must resolve to exactly one file in `rev`.
    ctx = repo[rev]
    matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if matcher(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
597
598
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path at which the backup for ``filepath`` should be
    written. May delete conflicting files/directories under the configured
    backup root as a side effect.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        # Default behavior: sibling file with a .orig suffix.
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # Only the first (deepest) conflicting ancestor can exist
                # as a file; once removed, makedirs can proceed.
                break

        origvfs.makedirs(origbackupdir)

    # A directory (but not a symlink) occupying the backup path itself must
    # go so the backup file can be written there.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
633
634
634 class _containsnode(object):
635 class _containsnode(object):
635 """proxy __contains__(node) to container.__contains__ which accepts revs"""
636 """proxy __contains__(node) to container.__contains__ which accepts revs"""
636
637
637 def __init__(self, repo, revcontainer):
638 def __init__(self, repo, revcontainer):
638 self._torev = repo.changelog.rev
639 self._torev = repo.changelog.rev
639 self._revcontains = revcontainer.__contains__
640 self._revcontains = revcontainer.__contains__
640
641
641 def __contains__(self, node):
642 def __contains__(self, node):
642 return self._revcontains(self._torev(node))
643 return self._revcontains(self._torev(node))
643
644
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # Explicit entries in `moves` take precedence over computed ones.
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # (name, None) requests deletion of bookmark `name`.
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            # Without obsolescence support, fall back to stripping the
            # replaced nodes outright (delayed until a safe point).
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
737
738
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and remove missing ones matched by ``matcher``.

    ``prefix`` is prepended when reporting subrepo paths. ``dry_run`` and
    ``similarity`` fall back to the corresponding entries in ``opts``.
    Recurses into subrepos when requested. Returns 1 if any explicitly
    requested file was rejected or a subrepo reported failure, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # Handle matching subrepositories first, recursing into each one.
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # Files the matcher flags as bad are collected here; an explicitly
    # listed bad file makes the whole operation return 1 at the end.
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    # Report what will be added (unknown/forgotten) and removed (deleted),
    # skipping exact matches unless verbose.
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
793
794
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # Define the rejection list *before* the matcher's badfn closes over it.
    # The original code bound `rejected` on the line after the lambda and
    # relied on late binding of the closed-over name; defining it first is
    # equivalent at runtime but no longer order-fragile.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # Report files about to be added (unknown/forgotten) or removed.
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # Any explicitly requested file that was rejected is a failure.
    for f in rejected:
        if f in m.files():
            return 1
    return 0
822
823
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # Classify each walked path by its dirstate code ('?' not tracked,
    # 'r' removed, 'a' added) combined with whether it exists on disk
    # (`st` is the stat result, or falsy if missing).
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # Untracked and on a safe path: candidate for adding.
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # Tracked but gone from disk: candidate for removal.
            deleted.append(abs)
        elif dstate == 'r' and st:
            # Marked removed yet present on disk again.
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
851
852
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    if similarity <= 0:
        # Rename detection disabled.
        return {}
    renames = {}
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # Report the rename unless both ends were named explicitly
        # (and we are not in verbose mode).
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
866
867
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    # All dirstate mutations happen under the working-directory lock.
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
876
877
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # Follow an existing copy chain: if src is itself a copy, treat its
    # original as the copy source.
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # Undo the copy marking by re-checking the file's state, unless it
        # is already merged ('m') or normal ('n').
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # The source was only added, never committed: there is no
            # revision to record copy data against.
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # Still make sure dst is tracked ('?' unknown, 'r' removed).
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
895
896
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements read, or raises RequirementError if the
    file is corrupt or lists features this Mercurial does not support.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # An empty line or one whose first byte is not alphanumeric cannot
        # be a valid requirement name: the file must be damaged.
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(requirement)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
914
915
def writerequires(opener, requirements):
    """Write ``requirements``, sorted and one per line, via ``opener``."""
    with opener('requires', 'w') as fp:
        fp.write(''.join('%s\n' % r for r in sorted(requirements)))
919
920
class filecachesubentry(object):
    """Tracks the stat info of a single path for cache invalidation.

    ``_cacheable`` is tri-state: True/False once known, None while the
    file has never been successfully stat'ed.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        # Only record initial stat info when requested.
        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # Re-record stat info, but only if caching is (assumed) possible.
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the path changed since the last recorded stat
        (or if it cannot be cached at all)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # Returns util.cachestat for path, or None if the file is missing
        # (ENOENT is swallowed; other OS errors propagate).
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
974
975
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        # One sub-entry per path; `stat` controls whether initial stat
        # information is recorded immediately.
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(subentry.changed() for subentry in self._entries)

    def refresh(self):
        # Re-record stat information for every tracked path.
        for subentry in self._entries:
            subentry.refresh()
991
992
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative file names; join() turns them into runtime paths
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        # (stored as bytes, matching the keys used in obj._filecache)
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # Because filecache defines __set__ it is a data descriptor, so
        # Python routes every attribute access through __get__; the
        # instance __dict__ is therefore consulted by hand here.
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # on-disk state moved; rebuild the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1070
1071
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            # NOTE: shell=True is intentional here: 'cmd' comes from the
            # user's own [extdata] configuration, not from untrusted input
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child and close the stream, even on parse errors
        if proc:
            proc.communicate()
        if src:
            src.close()
    # 'proc' is only set for shell: sources, so 'cmd' is defined whenever
    # this branch is taken
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)[0]))

    return data
1125
1126
1126 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1127 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1127 if lock is None:
1128 if lock is None:
1128 raise error.LockInheritanceContractViolation(
1129 raise error.LockInheritanceContractViolation(
1129 'lock can only be inherited while held')
1130 'lock can only be inherited while held')
1130 if environ is None:
1131 if environ is None:
1131 environ = {}
1132 environ = {}
1132 with lock.inherit() as locker:
1133 with lock.inherit() as locker:
1133 environ[envvar] = locker
1134 environ[envvar] = locker
1134 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1135 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1135
1136
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    # delegate to the generic lock-inheriting runner with the wlock cookie
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1144
1145
def gdinitconfig(ui):
    """Return True if new repositories should use general delta.

    Enabled by either of the (experimental) format.generaldelta or
    format.usegeneraldelta config options.
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1151
1152
def gddeltaconfig(ui):
    """Return True if incoming deltas should be optimised for general delta.

    Reads the experimental format.generaldelta config option.
    """
    return ui.configbool('format', 'generaldelta')
1157
1158
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # reserved pseudo-key used by read()/write() for the raw first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # NOTE(review): 'keys' is accepted but never used here — confirm
        # whether any caller still relies on passing it
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a line without '=' makes the dict() constructor blow up
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        # validate every pair before writing anything
        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp makes the write all-or-nothing
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1226
1227
# transaction names (matched by prefix in registersummarycallback) after
# which the number of obsoleted changesets is reported
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix) after which the range of newly
# added changesets is reported
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1247
1248
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    'txnname' is matched by prefix against the known transaction sources to
    decide which summaries (obsoleted changesets, new instabilities, new
    changeset range) get hooked onto 'otr' via addpostclose callbacks.
    """
    def txmatch(sources):
        # prefix match so e.g. 'pull\nhttp://...' style names still match
        return any(txnname.startswith(source) for source in sources)

    # used to generate unique, ordered postclose category names
    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            # resolve the weakref and re-apply filtering at call time
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unfiltered instable revisions per instability type
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken now; compared against post-transaction counts below
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1331
1332
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line, space-separated summary of short node hashes.

    Every node is shown when there are at most 'maxnumnodes' of them or
    when the ui is verbose; otherwise only the first 'maxnumnodes' appear,
    followed by a count of the remainder.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(n) for n in nodes)
    shown = ' '.join(short(n) for n in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1337
1338
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1352
1353
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions override this hook
    return sink
1358
1359
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access must be explicitly enabled; the configbool lookup is
    # only reached when the repo is filtered at all
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    # only act on the 'visible' filter (or one previously produced here)
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash- or revnum-looking symbol from the revset specs
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1401
1402
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # an integer symbol within range is treated purely as a revision
        # number and is never retried as a hash prefix
        try:
            n = int(s)
        except ValueError:
            pass
        else:
            if n <= tiprev:
                if allowrevnums and n not in cl:
                    revs.add(n)
                continue

        # otherwise try the symbol as a (possibly partial) node hash
        try:
            node = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            continue

        if node is not None:
            rev = unficl.rev(node)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now