revert: extract origvfs logic in a sub-function...
Boris Feld
r40783:65591a51 default
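
In short, this change lifts the 'ui.origbackuppath' config lookup out of
origpath() into a new getorigvfs() helper, so callers can obtain the backup
vfs directly. As a rough illustration of the resulting split (getorigvfs and
origpath are the real names from the diff below; the calling code here is
hypothetical, not part of the changeset):

    # Hypothetical caller, sketched against the helpers in the diff below.
    origvfs = getorigvfs(ui, repo)     # vfs for ui.origbackuppath, or None
    if origvfs is None:
        backup = filepath + ".orig"    # default: sibling file, .orig suffix
    else:
        backup = origpath(ui, repo, filepath)  # path under the backup dir
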
diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -1,1802 +1,1810 @@
 # scmutil.py - Mercurial core utility functions
 #
 # Copyright Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import errno
 import glob
 import hashlib
 import os
 import re
 import socket
 import subprocess
 import weakref
 
 from .i18n import _
 from .node import (
     bin,
     hex,
     nullid,
     nullrev,
     short,
     wdirid,
     wdirrev,
 )
 
 from . import (
     encoding,
     error,
     match as matchmod,
     obsolete,
     obsutil,
     pathutil,
     phases,
     policy,
     pycompat,
     revsetlang,
     similar,
     smartset,
     url,
     util,
     vfs,
 )
 
 from .utils import (
     procutil,
     stringutil,
 )
 
 if pycompat.iswindows:
     from . import scmwindows as scmplatform
 else:
     from . import scmposix as scmplatform
 
 parsers = policy.importmod(r'parsers')
 
 termsize = scmplatform.termsize
 
 class status(tuple):
     '''Named tuple with a list of files per status. The 'deleted', 'unknown'
     and 'ignored' properties are only relevant to the working copy.
     '''
 
     __slots__ = ()
 
     def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                 clean):
         return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                    ignored, clean))
 
     @property
     def modified(self):
         '''files that have been modified'''
         return self[0]
 
     @property
     def added(self):
         '''files that have been added'''
         return self[1]
 
     @property
     def removed(self):
         '''files that have been removed'''
         return self[2]
 
     @property
     def deleted(self):
         '''files that are in the dirstate, but have been deleted from the
         working copy (aka "missing")
         '''
         return self[3]
 
     @property
     def unknown(self):
         '''files not in the dirstate that are not ignored'''
         return self[4]
 
     @property
     def ignored(self):
         '''files not in the dirstate that are ignored (by _dirignore())'''
         return self[5]
 
     @property
     def clean(self):
         '''files that have not been modified'''
         return self[6]
 
     def __repr__(self, *args, **kwargs):
         return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                  r'unknown=%s, ignored=%s, clean=%s>') %
                 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
 
 def itersubrepos(ctx1, ctx2):
     """find subrepos in ctx1 or ctx2"""
     # Create a (subpath, ctx) mapping where we prefer subpaths from
     # ctx1. The subpaths from ctx2 are important when the .hgsub file
     # has been modified (in ctx2) but not yet committed (in ctx1).
     subpaths = dict.fromkeys(ctx2.substate, ctx2)
     subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
 
     missing = set()
 
     for subpath in ctx2.substate:
         if subpath not in ctx1.substate:
             del subpaths[subpath]
             missing.add(subpath)
 
     for subpath, ctx in sorted(subpaths.iteritems()):
         yield subpath, ctx.sub(subpath)
 
     # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
     # status and diff will have an accurate result when it does
     # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
     # against itself.
     for subpath in missing:
         yield subpath, ctx2.nullsub(subpath, ctx1)
 
 def nochangesfound(ui, repo, excluded=None):
     '''Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
     '''
     secretlist = []
     if excluded:
         for n in excluded:
             ctx = repo[n]
             if ctx.phase() >= phases.secret and not ctx.extinct():
                 secretlist.append(n)
 
     if secretlist:
         ui.status(_("no changes found (ignored %d secret changesets)\n")
                   % len(secretlist))
     else:
         ui.status(_("no changes found\n"))
 
 def callcatch(ui, func):
     """call func() with global exception handling
 
     return func() if no exception happens. otherwise do some error handling
     and return an exit code accordingly. does not handle all exceptions.
     """
     try:
         try:
             return func()
         except: # re-raises
             ui.traceback()
             raise
     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
         if inst.errno == errno.ETIMEDOUT:
             reason = _('timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker))
         else:
             reason = _('lock held by %r') % inst.locker
         ui.error(_("abort: %s: %s\n") % (
             inst.desc or stringutil.forcebytestr(inst.filename), reason))
         if not inst.locker:
             ui.error(_("(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
         ui.error(_("abort: could not lock %s: %s\n") %
                  (inst.desc or stringutil.forcebytestr(inst.filename),
                   encoding.strtolocal(inst.strerror)))
     except error.OutOfBandError as inst:
         if inst.args:
             msg = _("abort: remote error:\n")
         else:
             msg = _("abort: remote error\n")
         ui.error(msg)
         if inst.args:
             ui.error(''.join(inst.args))
         if inst.hint:
             ui.error('(%s)\n' % inst.hint)
     except error.RepoError as inst:
         ui.error(_("abort: %s!\n") % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
     except error.ResponseError as inst:
         ui.error(_("abort: %s") % inst.args[0])
         msg = inst.args[1]
         if isinstance(msg, type(u'')):
             msg = pycompat.sysbytes(msg)
         if not isinstance(msg, bytes):
             ui.error(" %r\n" % (msg,))
         elif not msg:
             ui.error(_(" empty string\n"))
         else:
             ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
         ui.error(_("abort: file censored %s!\n") % inst)
     except error.StorageError as inst:
         ui.error(_("abort: %s!\n") % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
     except error.InterventionRequired as inst:
         ui.error("%s\n" % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
         return 1
     except error.WdirUnsupported:
         ui.error(_("abort: working directory revision cannot be specified\n"))
     except error.Abort as inst:
         ui.error(_("abort: %s\n") % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
     except ImportError as inst:
         ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
         if m in "mpatch bdiff".split():
             ui.error(_("(did you forget to compile extensions?)\n"))
         elif m in "zlib".split():
             ui.error(_("(is your Python install correct?)\n"))
     except IOError as inst:
         if util.safehasattr(inst, "code"):
             ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
         elif util.safehasattr(inst, "reason"):
             try: # usually it is in the form (errno, strerror)
                 reason = inst.reason.args[1]
             except (AttributeError, IndexError):
                 # it might be anything, for example a string
                 reason = inst.reason
             if isinstance(reason, pycompat.unicode):
                 # SSLError of Python 2.7.9 contains a unicode
                 reason = encoding.unitolocal(reason)
             ui.error(_("abort: error: %s\n") % reason)
         elif (util.safehasattr(inst, "args")
               and inst.args and inst.args[0] == errno.EPIPE):
             pass
         elif getattr(inst, "strerror", None):
             if getattr(inst, "filename", None):
                 ui.error(_("abort: %s: %s\n") % (
                     encoding.strtolocal(inst.strerror),
                     stringutil.forcebytestr(inst.filename)))
             else:
                 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
         else:
             raise
     except OSError as inst:
         if getattr(inst, "filename", None) is not None:
             ui.error(_("abort: %s: '%s'\n") % (
                 encoding.strtolocal(inst.strerror),
                 stringutil.forcebytestr(inst.filename)))
         else:
             ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
     except MemoryError:
         ui.error(_("abort: out of memory\n"))
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case catch this and and pass exit code to caller.
         return inst.code
     except socket.error as inst:
         ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
 
     return -1
 
 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
     if lbl in ['tip', '.', 'null']:
         raise error.Abort(_("the name '%s' is reserved") % lbl)
     for c in (':', '\0', '\n', '\r'):
         if c in lbl:
             raise error.Abort(
                 _("%r cannot be used in a name") % pycompat.bytestr(c))
     try:
         int(lbl)
         raise error.Abort(_("cannot use an integer as a name"))
     except ValueError:
         pass
     if lbl.strip() != lbl:
         raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
 
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if '\r' in f or '\n' in f:
         raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                           % pycompat.bytestr(f))
 
 def checkportable(ui, f):
     '''Check if filename f is portable and warn or abort depending on config'''
     checkfilename(f)
     abort, warn = checkportabilityalert(ui)
     if abort or warn:
         msg = util.checkwinfilename(f)
         if msg:
             msg = "%s: %s" % (msg, procutil.shellquote(f))
             if abort:
                 raise error.Abort(msg)
             ui.warn(_("warning: %s\n") % msg)
 
 def checkportabilityalert(ui):
     '''check if the user's config requests nothing, a warning, or abort for
     non-portable filenames'''
     val = ui.config('ui', 'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
     abort = pycompat.iswindows or lval == 'abort'
     warn = bval or lval == 'warn'
     if bval is None and not (warn or abort or lval == 'ignore'):
         raise error.ConfigError(
             _("ui.portablefilenames value is invalid ('%s')") % val)
     return abort, warn
 
 class casecollisionauditor(object):
     def __init__(self, ui, abort, dirstate):
         self._ui = ui
         self._abort = abort
         allfiles = '\0'.join(dirstate._map)
         self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
         self._dirstate = dirstate
         # The purpose of _newfiles is so that we don't complain about
         # case collisions if someone were to call this object with the
         # same filename twice.
         self._newfiles = set()
 
     def __call__(self, f):
         if f in self._newfiles:
             return
         fl = encoding.lower(f)
         if fl in self._loweredfiles and f not in self._dirstate:
             msg = _('possible case-folding collision for %s') % f
             if self._abort:
                 raise error.Abort(msg)
             self._ui.warn(_("warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
 def filteredhash(repo, maxrev):
     """build hash of filtered revisions in the current repoview.
 
     Multiple caches perform up-to-date validation by checking that the
     tiprev and tipnode stored in the cache file match the current repository.
     However, this is not sufficient for validating repoviews because the set
     of revisions in the view may change without the repository tiprev and
     tipnode changing.
 
     This function hashes all the revs filtered from the view and returns
     that SHA-1 digest.
     """
     cl = repo.changelog
     if not cl.filteredrevs:
         return None
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
         s = hashlib.sha1()
         for rev in revs:
             s.update('%d;' % rev)
         key = s.digest()
     return key
 
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     '''yield every hg repository under path, always recursively.
     The recurse flag will only control recursion into repo working dirs'''
     def errhandler(err):
         if err.filename == path:
             raise err
     samestat = getattr(os.path, 'samestat', None)
     if followsym and samestat is not None:
         def adddir(dirlst, dirname):
             dirstat = os.stat(dirname)
             match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
             if not match:
                 dirlst.append(dirstat)
             return not match
     else:
         followsym = False
 
     if (seen_dirs is None) and followsym:
         seen_dirs = []
         adddir(seen_dirs, path)
     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
         dirs.sort()
         if '.hg' in dirs:
             yield root # found a repository
             qroot = os.path.join(root, '.hg', 'patches')
             if os.path.isdir(os.path.join(qroot, '.hg')):
                 yield qroot # we have a patch queue repo here
             if recurse:
                 # avoid recursing inside the .hg directory
                 dirs.remove('.hg')
             else:
                 dirs[:] = [] # don't descend further
         elif followsym:
             newdirs = []
             for d in dirs:
                 fname = os.path.join(root, d)
                 if adddir(seen_dirs, fname):
                     if os.path.islink(fname):
                         for hgname in walkrepos(fname, True, seen_dirs):
                             yield hgname
                     else:
                         newdirs.append(d)
             dirs[:] = newdirs
 
 def binnode(ctx):
     """Return binary node id for a given basectx"""
     node = ctx.node()
     if node is None:
         return wdirid
     return node
 
 def intrev(ctx):
     """Return integer for a given basectx that can be used in comparison or
     arithmetic operation"""
     rev = ctx.rev()
     if rev is None:
         return wdirrev
     return rev
 
 def formatchangeid(ctx):
     """Format changectx as '{rev}:{node|formatnode}', which is the default
     template provided by logcmdutil.changesettemplater"""
     repo = ctx.repo()
     return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
 
 def formatrevnode(ui, rev, node):
     """Format given revision and node depending on the current verbosity"""
     if ui.debugflag:
         hexfunc = hex
     else:
         hexfunc = short
     return '%d:%s' % (rev, hexfunc(node))
 
 def resolvehexnodeidprefix(repo, prefix):
     if (prefix.startswith('x') and
         repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
         prefix = prefix[1:]
     try:
         # Uses unfiltered repo because it's faster when prefix is ambiguous/
         # This matches the shortesthexnodeidprefix() function below.
         node = repo.unfiltered().changelog._partialmatch(prefix)
     except error.AmbiguousPrefixLookupError:
         revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
         if revset:
             # Clear config to avoid infinite recursion
             configoverrides = {('experimental',
                                 'revisions.disambiguatewithin'): None}
             with repo.ui.configoverride(configoverrides):
                 revs = repo.anyrevs([revset], user=True)
                 matches = []
                 for rev in revs:
                     node = repo.changelog.node(rev)
                     if hex(node).startswith(prefix):
                         matches.append(node)
                 if len(matches) == 1:
                     return matches[0]
         raise
     if node is None:
         return
     repo.changelog.rev(node) # make sure node isn't filtered
     return node
 
 def mayberevnum(repo, prefix):
     """Checks if the given prefix may be mistaken for a revision number"""
     try:
         i = int(prefix)
         # if we are a pure int, then starting with zero will not be
         # confused as a rev; or, obviously, if the int is larger
         # than the value of the tip rev. We still need to disambiguate if
         # prefix == '0', since that *is* a valid revnum.
         if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
             return False
         return True
     except ValueError:
         return False
 
 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
     """Find the shortest unambiguous prefix that matches hexnode.
 
     If "cache" is not None, it must be a dictionary that can be used for
     caching between calls to this method.
     """
     # _partialmatch() of filtered changelog could take O(len(repo)) time,
     # which would be unacceptably slow. so we look for hash collision in
     # unfiltered space, which means some hashes may be slightly longer.
 
     minlength=max(minlength, 1)
 
     def disambiguate(prefix):
         """Disambiguate against revnums."""
         if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
             if mayberevnum(repo, prefix):
                 return 'x' + prefix
             else:
                 return prefix
 
         hexnode = hex(node)
         for length in range(len(prefix), len(hexnode) + 1):
             prefix = hexnode[:length]
             if not mayberevnum(repo, prefix):
                 return prefix
 
     cl = repo.unfiltered().changelog
     revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
     if revset:
         revs = None
         if cache is not None:
             revs = cache.get('disambiguationrevset')
         if revs is None:
             revs = repo.anyrevs([revset], user=True)
             if cache is not None:
                 cache['disambiguationrevset'] = revs
         if cl.rev(node) in revs:
             hexnode = hex(node)
             nodetree = None
             if cache is not None:
                 nodetree = cache.get('disambiguationnodetree')
             if not nodetree:
                 try:
                     nodetree = parsers.nodetree(cl.index, len(revs))
                 except AttributeError:
                     # no native nodetree
                     pass
                 else:
                     for r in revs:
                         nodetree.insert(r)
                     if cache is not None:
                         cache['disambiguationnodetree'] = nodetree
             if nodetree is not None:
                 length = max(nodetree.shortest(node), minlength)
                 prefix = hexnode[:length]
                 return disambiguate(prefix)
             for length in range(minlength, len(hexnode) + 1):
                 matches = []
                 prefix = hexnode[:length]
                 for rev in revs:
                     otherhexnode = repo[rev].hex()
                     if prefix == otherhexnode[:length]:
                         matches.append(otherhexnode)
                 if len(matches) == 1:
                     return disambiguate(prefix)
 
     try:
         return disambiguate(cl.shortest(node, minlength))
     except error.LookupError:
         raise error.RepoLookupError()
 
 def isrevsymbol(repo, symbol):
     """Checks if a symbol exists in the repo.
 
     See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
     symbol is an ambiguous nodeid prefix.
     """
     try:
         revsymbol(repo, symbol)
         return True
     except error.RepoLookupError:
         return False
 
 def revsymbol(repo, symbol):
     """Returns a context given a single revision symbol (as string).
 
     This is similar to revsingle(), but accepts only a single revision symbol,
     i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
     not "max(public())".
     """
     if not isinstance(symbol, bytes):
         msg = ("symbol (%s of type %s) was not a string, did you mean "
                "repo[symbol]?" % (symbol, type(symbol)))
         raise error.ProgrammingError(msg)
     try:
         if symbol in ('.', 'tip', 'null'):
             return repo[symbol]
 
         try:
             r = int(symbol)
             if '%d' % r != symbol:
                 raise ValueError
             l = len(repo.changelog)
             if r < 0:
                 r += l
             if r < 0 or r >= l and r != wdirrev:
                 raise ValueError
             return repo[r]
         except error.FilteredIndexError:
             raise
         except (ValueError, OverflowError, IndexError):
             pass
 
         if len(symbol) == 40:
             try:
                 node = bin(symbol)
                 rev = repo.changelog.rev(node)
                 return repo[rev]
             except error.FilteredLookupError:
                 raise
             except (TypeError, LookupError):
                 pass
 
         # look up bookmarks through the name interface
         try:
             node = repo.names.singlenode(repo, symbol)
             rev = repo.changelog.rev(node)
             return repo[rev]
         except KeyError:
             pass
 
         node = resolvehexnodeidprefix(repo, symbol)
         if node is not None:
             rev = repo.changelog.rev(node)
             return repo[rev]
 
         raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
 
     except error.WdirUnsupported:
         return repo[None]
     except (error.FilteredIndexError, error.FilteredLookupError,
             error.FilteredRepoLookupError):
         raise _filterederror(repo, symbol)
 
 def _filterederror(repo, changeid):
     """build an exception to be raised about a filtered changeid
 
     This is extracted in a function to help extensions (eg: evolve) to
     experiment with various message variants."""
     if repo.filtername.startswith('visible'):
 
         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
         ctx = revsymbol(unfilteredrepo, changeid)
 
         # If the changeset is obsolete, enrich the message with the reason
         # that made this changeset not visible
         if ctx.obsolete():
             msg = obsutil._getfilteredreason(repo, changeid, ctx)
         else:
             msg = _("hidden revision '%s'") % changeid
 
         hint = _('use --hidden to access hidden revisions')
 
         return error.FilteredRepoLookupError(msg, hint=hint)
     msg = _("filtered revision '%s' (not in '%s' subset)")
     msg %= (changeid, repo.filtername)
     return error.FilteredRepoLookupError(msg)
 
 def revsingle(repo, revspec, default='.', localalias=None):
     if not revspec and revspec != 0:
         return repo[default]
 
     l = revrange(repo, [revspec], localalias=localalias)
     if not l:
         raise error.Abort(_('empty revision set'))
     return repo[l.last()]
 
 def _pairspec(revspec):
     tree = revsetlang.parse(revspec)
     return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
 
 def revpair(repo, revs):
     if not revs:
         return repo['.'], repo[None]
 
     l = revrange(repo, revs)
 
     if not l:
         first = second = None
     elif l.isascending():
         first = l.min()
         second = l.max()
     elif l.isdescending():
         first = l.max()
         second = l.min()
     else:
         first = l.first()
         second = l.last()
 
     if first is None:
         raise error.Abort(_('empty revision range'))
     if (first == second and len(revs) >= 2
         and not all(revrange(repo, [r]) for r in revs)):
         raise error.Abort(_('empty revision on one side of range'))
 
     # if top-level is range expression, the result must always be a pair
     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
         return repo[first], repo[None]
 
     return repo[first], repo[second]
 
 def revrange(repo, specs, localalias=None):
     """Execute 1 to many revsets and return the union.
 
     This is the preferred mechanism for executing revsets using user-specified
     config options, such as revset aliases.
 
     The revsets specified by ``specs`` will be executed via a chained ``OR``
     expression. If ``specs`` is empty, an empty result is returned.
 
     ``specs`` can contain integers, in which case they are assumed to be
     revision numbers.
 
     It is assumed the revsets are already formatted. If you have arguments
     that need to be expanded in the revset, call ``revsetlang.formatspec()``
     and pass the result as an element of ``specs``.
 
     Specifying a single revset is allowed.
 
     Returns a ``revset.abstractsmartset`` which is a list-like interface over
     integer revisions.
     """
     allspecs = []
     for spec in specs:
         if isinstance(spec, int):
             spec = revsetlang.formatspec('rev(%d)', spec)
         allspecs.append(spec)
     return repo.anyrevs(allspecs, user=True, localalias=localalias)
 
 def meaningfulparents(repo, ctx):
     """Return list of meaningful (or all if debug) parentrevs for rev.
 
     For merges (two non-nullrev revisions) both parents are meaningful.
     Otherwise the first parent revision is considered meaningful if it
     is not the preceding revision.
     """
     parents = ctx.parents()
     if len(parents) > 1:
         return parents
     if repo.ui.debugflag:
         return [parents[0], repo[nullrev]]
     if parents[0].rev() >= intrev(ctx) - 1:
         return []
     return parents
 
 def expandpats(pats):
     '''Expand bare globs when running on windows.
     On posix we assume it already has already been done by sh.'''
     if not util.expandglobs:
         return list(pats)
     ret = []
     for kindpat in pats:
         kind, pat = matchmod._patsplit(kindpat, None)
         if kind is None:
             try:
                 globbed = glob.glob(pat)
             except re.error:
                 globbed = [pat]
             if globbed:
                 ret.extend(globbed)
                 continue
         ret.append(kindpat)
     return ret
 
 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                  badfn=None):
     '''Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
     is provided.'''
     if pats == ("",):
         pats = []
     if opts is None:
         opts = {}
     if not globbed and default == 'relpath':
         pats = expandpats(pats or [])
 
     def bad(f, msg):
         ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
 
     if badfn is None:
         badfn = bad
 
     m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                   default, listsubrepos=opts.get('subrepos'), badfn=badfn)
 
     if m.always():
         pats = []
     return m, pats
 
 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
           badfn=None):
     '''Return a matcher that will warn about bad matches.'''
     return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
 
 def matchall(repo):
     '''Return a matcher that will efficiently match everything.'''
     return matchmod.always(repo.root, repo.getcwd())
 
 def matchfiles(repo, files, badfn=None):
     '''Return a matcher that will efficiently match exactly these files.'''
     return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
 
 def parsefollowlinespattern(repo, rev, pat, msg):
     """Return a file name from `pat` pattern suitable for usage in followlines
     logic.
     """
     if not matchmod.patkind(pat):
         return pathutil.canonpath(repo.root, repo.getcwd(), pat)
     else:
         ctx = repo[rev]
         m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
         files = [f for f in ctx if m(f)]
         if len(files) != 1:
             raise error.ParseError(msg)
         return files[0]
 
+def getorigvfs(ui, repo):
+    """return a vfs suitable to save 'orig' file
+
+    return None if no special directory is configured"""
+    origbackuppath = ui.config('ui', 'origbackuppath')
+    if not origbackuppath:
+        return None
+    return vfs.vfs(repo.wvfs.join(origbackuppath))
+
 def origpath(ui, repo, filepath):
     '''customize where .orig files are created
 
     Fetch user defined path from config file: [ui] origbackuppath = <path>
     Fall back to default (filepath with .orig suffix) if not specified
     '''
-    origbackuppath = ui.config('ui', 'origbackuppath')
-    if not origbackuppath:
+    origvfs = getorigvfs(ui, repo)
+    if origvfs is None:
         return filepath + ".orig"
 
     # Convert filepath from an absolute path into a path inside the repo.
     filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                      start=repo.root))
 
-    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
     origbackupdir = origvfs.dirname(filepathfromroot)
     if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
         ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
 
         # Remove any files that conflict with the backup file's path
         for f in reversed(list(util.finddirs(filepathfromroot))):
             if origvfs.isfileorlink(f):
                 ui.note(_('removing conflicting file: %s\n')
                         % origvfs.join(f))
                 origvfs.unlink(f)
                 break
 
         origvfs.makedirs(origbackupdir)
 
     if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
         ui.note(_('removing conflicting directory: %s\n')
                 % origvfs.join(filepathfromroot))
         origvfs.rmtree(filepathfromroot, forcibly=True)
 
     return origvfs.join(filepathfromroot)
852
860
853 class _containsnode(object):
861 class _containsnode(object):
854 """proxy __contains__(node) to container.__contains__ which accepts revs"""
862 """proxy __contains__(node) to container.__contains__ which accepts revs"""
855
863
856 def __init__(self, repo, revcontainer):
864 def __init__(self, repo, revcontainer):
857 self._torev = repo.changelog.rev
865 self._torev = repo.changelog.rev
858 self._revcontains = revcontainer.__contains__
866 self._revcontains = revcontainer.__contains__
859
867
860 def __contains__(self, node):
868 def __contains__(self, node):
861 return self._revcontains(self._torev(node))
869 return self._revcontains(self._torev(node))
862
870
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in the obsmarker
    if obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non-tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

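# Illustrative sketch (not part of the module): a hypothetical history-editing
# command that rewrote `oldnode` into `newnode` could finish with:
#
#     cleanupnodes(repo, {oldnode: [newnode]}, 'myoperation')
#
# which moves bookmarks from oldnode to newnode, then either records an
# obsmarker (when obsolescence is enabled) or strips oldnode.
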
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

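# Illustrative sketch (not part of the module): a filecache subclass that
# resolves paths through an object's vfs, decorating a method on an object
# that carries a ``_filecache`` dict. All names below are hypothetical.
#
#     class repofilecache(filecache):
#         def join(self, obj, fname):
#             return obj.vfs.join(fname)
#
#     class thing(object):
#         def __init__(self, vfs):
#             self.vfs = vfs
#             self._filecache = {}
#         @repofilecache('bookmarks')
#         def bookmarks(self):
#             return expensiveparse(self.vfs.read('bookmarks'))
#
# The first access parses the file; later attribute accesses return the value
# stashed in the instance dict. After ``delattr(obj, 'bookmarks')`` the next
# access re-checks the file's stat and reparses only if it changed.
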
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

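# Illustrative sketch (not part of the module): with a hypothetical hgrc
# entry such as
#
#     [extdata]
#     bugzilla = shell:cat .hg/bugzilla-ids
#
# where each output line is "<revspec> <value>", extdatasource(repo,
# 'bugzilla') returns a {rev: value} dict covering the revisions that exist
# locally; records for unknown revisions are silently dropped.
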
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)

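# Illustrative sketch (not part of the module): the class is designed to be
# used as a context manager so the progress bar is cleared even on error.
# The worklist and process() below are hypothetical.
#
#     with progress(ui, _('processing'), unit=_('files'),
#                   total=len(worklist)) as prog:
#         for f in worklist:
#             prog.increment(item=f)
#             process(f)
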
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta')
            or ui.configbool('format', 'sparse-revlog'))

def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

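# Illustrative sketch (not part of the module): round-tripping state through
# a simple key-value file. The vfs and file name below are hypothetical.
#
#     skvf = simplekeyvaluefile(repo.vfs, 'mystate')
#     skvf.write({'step': '2', 'node': hex(node)}, firstline='1')
#     # the file now contains "1\nstep=2\nnode=...\n" (item order may vary)
#     d = skvf.read(firstlinenonkeyval=True)
#     # d == {'__firstline': '1', 'step': '2', 'node': '...'}
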
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

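# Illustrative sketch (not part of the module): an extension that wants file
# data fetched ahead of time would register a callback here, assuming
# util.hooks exposes an add(source, hook) method:
#
#     def _prefetch(repo, revs, match):
#         ...fetch the matched files for revs from a remote store...
#
#     fileprefetchhooks.add('myextension', _prefetch)
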
1551 # A marker that tells the evolve extension to suppress its own reporting
1559 # A marker that tells the evolve extension to suppress its own reporting
1552 _reportstroubledchangesets = True
1560 _reportstroubledchangesets = True
1553
1561
1554 def registersummarycallback(repo, otr, txnname=''):
1562 def registersummarycallback(repo, otr, txnname=''):
1555 """register a callback to issue a summary after the transaction is closed
1563 """register a callback to issue a summary after the transaction is closed
1556 """
1564 """
1557 def txmatch(sources):
1565 def txmatch(sources):
1558 return any(txnname.startswith(source) for source in sources)
1566 return any(txnname.startswith(source) for source in sources)
1559
1567
1560 categories = []
1568 categories = []
1561
1569
1562 def reportsummary(func):
1570 def reportsummary(func):
1563 """decorator for report callbacks."""
1571 """decorator for report callbacks."""
1564 # The repoview life cycle is shorter than the one of the actual
1572 # The repoview life cycle is shorter than the one of the actual
1565 # underlying repository. So the filtered object can die before the
1573 # underlying repository. So the filtered object can die before the
1566 # weakref is used leading to troubles. We keep a reference to the
1574 # weakref is used leading to troubles. We keep a reference to the
1567 # unfiltered object and restore the filtering when retrieving the
1575 # unfiltered object and restore the filtering when retrieving the
1568 # repository through the weakref.
1576 # repository through the weakref.
1569 filtername = repo.filtername
1577 filtername = repo.filtername
1570 reporef = weakref.ref(repo.unfiltered())
1578 reporef = weakref.ref(repo.unfiltered())
1571 def wrapped(tr):
1579 def wrapped(tr):
1572 repo = reporef()
1580 repo = reporef()
1573 if filtername:
1581 if filtername:
1574 repo = repo.filtered(filtername)
1582 repo = repo.filtered(filtername)
1575 func(repo, tr)
1583 func(repo, tr)
1576 newcat = '%02i-txnreport' % len(categories)
1584 newcat = '%02i-txnreport' % len(categories)
1577 otr.addpostclose(newcat, wrapped)
1585 otr.addpostclose(newcat, wrapped)
1578 categories.append(newcat)
1586 categories.append(newcat)
1579 return wrapped
1587 return wrapped
1580
1588
1581 if txmatch(_reportobsoletedsource):
1589 if txmatch(_reportobsoletedsource):
1582 @reportsummary
1590 @reportsummary
1583 def reportobsoleted(repo, tr):
1591 def reportobsoleted(repo, tr):
1584 obsoleted = obsutil.getobsoleted(repo, tr)
1592 obsoleted = obsutil.getobsoleted(repo, tr)
1585 if obsoleted:
1593 if obsoleted:
1586 repo.ui.status(_('obsoleted %i changesets\n')
1594 repo.ui.status(_('obsoleted %i changesets\n')
1587 % len(obsoleted))
1595 % len(obsoleted))
1588
1596
1589 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1597 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1590 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1598 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1591 instabilitytypes = [
1599 instabilitytypes = [
1592 ('orphan', 'orphan'),
1600 ('orphan', 'orphan'),
1593 ('phase-divergent', 'phasedivergent'),
1601 ('phase-divergent', 'phasedivergent'),
1594 ('content-divergent', 'contentdivergent'),
1602 ('content-divergent', 'contentdivergent'),
1595 ]
1603 ]
1596
1604
1597 def getinstabilitycounts(repo):
1605 def getinstabilitycounts(repo):
1598 filtered = repo.changelog.filteredrevs
1606 filtered = repo.changelog.filteredrevs
1599 counts = {}
1607 counts = {}
1600 for instability, revset in instabilitytypes:
1608 for instability, revset in instabilitytypes:
1601 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1609 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1602 filtered)
1610 filtered)
1603 return counts
1611 return counts
1604
1612
1605 oldinstabilitycounts = getinstabilitycounts(repo)
1613 oldinstabilitycounts = getinstabilitycounts(repo)
1606 @reportsummary
1614 @reportsummary
1607 def reportnewinstabilities(repo, tr):
1615 def reportnewinstabilities(repo, tr):
1608 newinstabilitycounts = getinstabilitycounts(repo)
1616 newinstabilitycounts = getinstabilitycounts(repo)
1609 for instability, revset in instabilitytypes:
1617 for instability, revset in instabilitytypes:
1610 delta = (newinstabilitycounts[instability] -
1618 delta = (newinstabilitycounts[instability] -
1611 oldinstabilitycounts[instability])
1619 oldinstabilitycounts[instability])
1612 msg = getinstabilitymessage(delta, instability)
1620 msg = getinstabilitymessage(delta, instability)
1613 if msg:
1621 if msg:
1614 repo.ui.warn(msg)
1622 repo.ui.warn(msg)
1615
1623
1616 if txmatch(_reportnewcssource):
1624 if txmatch(_reportnewcssource):
1617 @reportsummary
1625 @reportsummary
1618 def reportnewcs(repo, tr):
1626 def reportnewcs(repo, tr):
1619 """Report the range of new revisions pulled/unbundled."""
1627 """Report the range of new revisions pulled/unbundled."""
1620 origrepolen = tr.changes.get('origrepolen', len(repo))
1628 origrepolen = tr.changes.get('origrepolen', len(repo))
1621 unfi = repo.unfiltered()
1629 unfi = repo.unfiltered()
1622 if origrepolen >= len(unfi):
1630 if origrepolen >= len(unfi):
1623 return
1631 return
1624
1632
1625 # Compute the bounds of new visible revisions' range.
1633 # Compute the bounds of new visible revisions' range.
1626 revs = smartset.spanset(repo, start=origrepolen)
1634 revs = smartset.spanset(repo, start=origrepolen)
1627 if revs:
1635 if revs:
1628 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1636 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1629
1637
1630 if minrev == maxrev:
1638 if minrev == maxrev:
1631 revrange = minrev
1639 revrange = minrev
1632 else:
1640 else:
1633 revrange = '%s:%s' % (minrev, maxrev)
1641 revrange = '%s:%s' % (minrev, maxrev)
1634 draft = len(repo.revs('%ld and draft()', revs))
1642 draft = len(repo.revs('%ld and draft()', revs))
1635 secret = len(repo.revs('%ld and secret()', revs))
1643 secret = len(repo.revs('%ld and secret()', revs))
1636 if not (draft or secret):
1644 if not (draft or secret):
1637 msg = _('new changesets %s\n') % revrange
1645 msg = _('new changesets %s\n') % revrange
1638 elif draft and secret:
1646 elif draft and secret:
1639 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1647 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1640 msg %= (revrange, draft, secret)
1648 msg %= (revrange, draft, secret)
1641 elif draft:
1649 elif draft:
1642 msg = _('new changesets %s (%d drafts)\n')
1650 msg = _('new changesets %s (%d drafts)\n')
1643 msg %= (revrange, draft)
1651 msg %= (revrange, draft)
1644 elif secret:
1652 elif secret:
1645 msg = _('new changesets %s (%d secrets)\n')
1653 msg = _('new changesets %s (%d secrets)\n')
1646 msg %= (revrange, secret)
1654 msg %= (revrange, secret)
1647 else:
1655 else:
1648 errormsg = 'entered unreachable condition'
1656 errormsg = 'entered unreachable condition'
1649 raise error.ProgrammingError(errormsg)
1657 raise error.ProgrammingError(errormsg)
1650 repo.ui.status(msg)
1658 repo.ui.status(msg)
1651
1659
1652 # search new changesets directly pulled as obsolete
1660 # search new changesets directly pulled as obsolete
1653 duplicates = tr.changes.get('revduplicates', ())
1661 duplicates = tr.changes.get('revduplicates', ())
1654 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1662 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1655 origrepolen, duplicates)
1663 origrepolen, duplicates)
1656 cl = repo.changelog
1664 cl = repo.changelog
1657 extinctadded = [r for r in obsadded if r not in cl]
1665 extinctadded = [r for r in obsadded if r not in cl]
1658 if extinctadded:
1666 if extinctadded:
1659 # They are not just obsolete, but obsolete and invisible
1667 # They are not just obsolete, but obsolete and invisible
1660 # we call them "extinct" internally but the terms have not been
1668 # we call them "extinct" internally but the terms have not been
1661 # exposed to users.
1669 # exposed to users.
1662 msg = '(%d other changesets obsolete on arrival)\n'
1670 msg = '(%d other changesets obsolete on arrival)\n'
1663 repo.ui.status(msg % len(extinctadded))
1671 repo.ui.status(msg % len(extinctadded))
1664
1672
1665 @reportsummary
1673 @reportsummary
1666 def reportphasechanges(repo, tr):
1674 def reportphasechanges(repo, tr):
1667 """Report statistics of phase changes for changesets pre-existing
1675 """Report statistics of phase changes for changesets pre-existing
1668 pull/unbundle.
1676 pull/unbundle.
1669 """
1677 """
1670 origrepolen = tr.changes.get('origrepolen', len(repo))
1678 origrepolen = tr.changes.get('origrepolen', len(repo))
1671 phasetracking = tr.changes.get('phases', {})
1679 phasetracking = tr.changes.get('phases', {})
1672 if not phasetracking:
1680 if not phasetracking:
1673 return
1681 return
1674 published = [
1682 published = [
1675 rev for rev, (old, new) in phasetracking.iteritems()
1683 rev for rev, (old, new) in phasetracking.iteritems()
1676 if new == phases.public and rev < origrepolen
1684 if new == phases.public and rev < origrepolen
1677 ]
1685 ]
1678 if not published:
1686 if not published:
1679 return
1687 return
1680 repo.ui.status(_('%d local changesets published\n')
1688 repo.ui.status(_('%d local changesets published\n')
1681 % len(published))
1689 % len(published))
1682
1690
1683 def getinstabilitymessage(delta, instability):
1691 def getinstabilitymessage(delta, instability):
1684 """function to return the message to show warning about new instabilities
1692 """function to return the message to show warning about new instabilities
1685
1693
1686 exists as a separate function so that extension can wrap to show more
1694 exists as a separate function so that extension can wrap to show more
1687 information like how to fix instabilities"""
1695 information like how to fix instabilities"""
1688 if delta > 0:
1696 if delta > 0:
1689 return _('%i new %s changesets\n') % (delta, instability)
1697 return _('%i new %s changesets\n') % (delta, instability)
1690
1698
1691 def nodesummaries(repo, nodes, maxnumnodes=4):
1699 def nodesummaries(repo, nodes, maxnumnodes=4):
1692 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1700 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1693 return ' '.join(short(h) for h in nodes)
1701 return ' '.join(short(h) for h in nodes)
1694 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1702 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1695 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1703 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1696
1704
1697 def enforcesinglehead(repo, tr, desc):
1705 def enforcesinglehead(repo, tr, desc):
1698 """check that no named branch has multiple heads"""
1706 """check that no named branch has multiple heads"""
1699 if desc in ('strip', 'repair'):
1707 if desc in ('strip', 'repair'):
1700 # skip the logic during strip
1708 # skip the logic during strip
1701 return
1709 return
1702 visible = repo.filtered('visible')
1710 visible = repo.filtered('visible')
1703 # possible improvement: we could restrict the check to affected branch
1711 # possible improvement: we could restrict the check to affected branch
1704 for name, heads in visible.branchmap().iteritems():
1712 for name, heads in visible.branchmap().iteritems():
1705 if len(heads) > 1:
1713 if len(heads) > 1:
1706 msg = _('rejecting multiple heads on branch "%s"')
1714 msg = _('rejecting multiple heads on branch "%s"')
1707 msg %= name
1715 msg %= name
1708 hint = _('%d heads: %s')
1716 hint = _('%d heads: %s')
1709 hint %= (len(heads), nodesummaries(repo, heads))
1717 hint %= (len(heads), nodesummaries(repo, heads))
1710 raise error.Abort(msg, hint=hint)
1718 raise error.Abort(msg, hint=hint)
1711
1719
1712 def wrapconvertsink(sink):
1720 def wrapconvertsink(sink):
1713 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1721 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1714 before it is used, whether or not the convert extension was formally loaded.
1722 before it is used, whether or not the convert extension was formally loaded.
1715 """
1723 """
1716 return sink
1724 return sink
1717
1725
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

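A hedged usage sketch follows; the config section and option names are
exactly the ones read above, while the caller and the spec value are
hypothetical:

# hgrc configuration required for direct access:
#   [experimental]
#   directaccess = True
#   directaccess.revnums = True      # also allow plain revision numbers
#
# hypothetical caller (not part of scmutil.py):
specs = ['1f0dee641bb7']                     # hash-like user input
repo = unhidehashlikerevs(repo, specs, 'warn')
# repo is now filtered as 'visible-hidden' with that changeset pinned
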
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of the
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

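To make the two resolution paths above concrete, a hedged walk-through with
hypothetical inputs:

# example (hypothetical inputs, not part of scmutil.py):
#   symbols = {'4', '1f0dee6'}
#   '4'       -> parsed as a revnum; added only when
#                experimental.directaccess.revnums is enabled and rev 4 is
#                absent from the filtered changelog (i.e. hidden)
#   '1f0dee6' -> resolved as a hex prefix via resolvehexnodeidprefix();
#                its revision is added when it is likewise hidden
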
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
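
A minimal usage sketch (the bookmark name '@' is hypothetical):

# example (not part of scmutil.py):
revs = bookmarkrevs(repo, '@')       # smartset of revisions owned by '@'
for rev in revs:
    repo.ui.write('%d\n' % rev)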