progress: split up _print() method in bar-updating and debug-printing...
Martin von Zweigbergk
r41180:3025fd3c default
@@ -1,1839 +1,1839 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
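
# Example (illustrative, with hypothetical file lists): a status object is
# built positionally and read back through the named properties; plain
# tuple indexing still works.
#
#   st = status([b'a.txt'], [], [], [], [], [], [b'b.txt'])
#   st.modified           # -> [b'a.txt']
#   st.clean              # -> [b'b.txt']
#   st[0] is st.modified  # -> True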

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
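
# Example (illustrative, assuming a configured `ui` and a hypothetical `run`
# callable): callcatch() wraps a command body so unhandled Mercurial errors
# become exit codes instead of tracebacks.
#
#   def run():
#       ...  # command body; may raise error.Abort, IOError, etc.
#   ret = callcatch(ui, run)  # run()'s result, or 1/-1/inst.code on error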

def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
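
# Example (illustrative, assuming `ui` and a repo's `dirstate`): the auditor
# is called once per filename being added and warns or aborts when the name
# differs from a tracked file only by case.
#
#   audit = casecollisionauditor(ui, abort=False, dirstate=repo.dirstate)
#   audit(b'README')   # warns if e.g. b'readme' is already tracked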

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
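
# Sketch of the key construction above (illustrative): the digest is simply
# SHA-1 over the decimal revnums joined by ';', so filtered revs 2 and 5
# hash the byte string '2;5;'.
#
#   s = hashlib.sha1()
#   for rev in [2, 5]:
#       s.update('%d;' % rev)
#   key = s.digest()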

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
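
# Example (illustrative, with a hypothetical path): walkrepos() yields the
# root of every repository it finds, including nested patch-queue repos.
#
#   for root in walkrepos('/srv/repos', followsym=True, recurse=True):
#       print(root)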

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))
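
# Example (illustrative, with a hypothetical node): the short 12-hex form is
# used unless --debug is in effect.
#
#   formatrevnode(ui, 5, node)  # -> '5:1234567890ab'
#   # with ui.debugflag set     # -> '5:' + the full 40-hex node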

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
            repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False
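
# Sketch of the rule above (illustrative, for a repo with 100 revisions):
#
#   mayberevnum(repo, b'0')    # True  ('0' is a valid revnum)
#   mayberevnum(repo, b'007')  # False (leading zero, so it is a pure prefix)
#   mayberevnum(repo, b'42')   # True  (42 < len(repo))
#   mayberevnum(repo, b'123')  # False (123 >= len(repo))
#   mayberevnum(repo, b'abc')  # False (not an integer at all)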

def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
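
# Example (illustrative, hypothetical prefix values): with
# experimental.revisions.prefixhexnode enabled, a shortest prefix that could
# be read as a revnum gets an 'x' marker, which resolvehexnodeidprefix()
# above strips again on lookup.
#
#   shortesthexnodeidprefix(repo, node)   # e.g. -> 'x12a' instead of '12a'
#   resolvehexnodeidprefix(repo, 'x12a')  # looks up nodeid prefix '12a'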

def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
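
# Example (illustrative): all of these forms resolve through revsymbol(),
# while a full revset expression does not.
#
#   revsymbol(repo, b'.')             # working directory parent
#   revsymbol(repo, b'tip')
#   revsymbol(repo, b'42')            # revision number
#   revsymbol(repo, b'deadbeef12ab')  # nodeid prefix
#   revsymbol(repo, b'my-bookmark')   # via the name interface
#   revsymbol(repo, b'max(public())') # -> RepoLookupError; use revrange()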

def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
            and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
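
# Example (illustrative): specs may mix pre-formatted revsets and bare
# integer revnums; the results are OR'ed into one smartset.
#
#   spec = revsetlang.formatspec(b'branch(%s)', b'default')
#   revs = revrange(repo, [spec, 5])  # union of the revset and rev 5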

def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret
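
# Example (illustrative, output depends on the working directory): on
# Windows (util.expandglobs is True) a bare glob is expanded here, while
# kind-prefixed patterns pass through untouched.
#
#   expandpats(['*.py', 're:.*'])  # e.g. -> ['a.py', 'b.py', 're:.*']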

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
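
# Example (illustrative, hypothetical paths): with no config, backups sit
# next to the original file; with "[ui] origbackuppath = .hg/origbackups"
# they are collected under the configured directory instead.
#
#   origpath(ui, repo, b'dir/f')  # -> 'dir/f.orig'
#   # with origbackuppath set     -> '<backupdir>/dir/f'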

class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls
903
903
904 # Unfiltered repo is needed since nodes in replacements might be hidden.
904 # Unfiltered repo is needed since nodes in replacements might be hidden.
905 unfi = repo.unfiltered()
905 unfi = repo.unfiltered()
906
906
907 # Calculate bookmark movements
907 # Calculate bookmark movements
908 if moves is None:
908 if moves is None:
909 moves = {}
909 moves = {}
910 for oldnodes, newnodes in replacements.items():
910 for oldnodes, newnodes in replacements.items():
911 for oldnode in oldnodes:
911 for oldnode in oldnodes:
912 if oldnode in moves:
912 if oldnode in moves:
913 continue
913 continue
914 if len(newnodes) > 1:
914 if len(newnodes) > 1:
915 # usually a split, take the one with biggest rev number
915 # usually a split, take the one with biggest rev number
916 newnode = next(unfi.set('max(%ln)', newnodes)).node()
916 newnode = next(unfi.set('max(%ln)', newnodes)).node()
917 elif len(newnodes) == 0:
917 elif len(newnodes) == 0:
918 # move bookmark backwards
918 # move bookmark backwards
919 allreplaced = []
919 allreplaced = []
920 for rep in replacements:
920 for rep in replacements:
921 allreplaced.extend(rep)
921 allreplaced.extend(rep)
922 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
922 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
923 allreplaced))
923 allreplaced))
924 if roots:
924 if roots:
925 newnode = roots[0].node()
925 newnode = roots[0].node()
926 else:
926 else:
927 newnode = nullid
927 newnode = nullid
928 else:
928 else:
929 newnode = newnodes[0]
929 newnode = newnodes[0]
930 moves[oldnode] = newnode
930 moves[oldnode] = newnode
931
931
932 allnewnodes = [n for ns in replacements.values() for n in ns]
932 allnewnodes = [n for ns in replacements.values() for n in ns]
933 toretract = {}
933 toretract = {}
934 toadvance = {}
934 toadvance = {}
935 if fixphase:
935 if fixphase:
936 precursors = {}
936 precursors = {}
937 for oldnodes, newnodes in replacements.items():
937 for oldnodes, newnodes in replacements.items():
938 for oldnode in oldnodes:
938 for oldnode in oldnodes:
939 for newnode in newnodes:
939 for newnode in newnodes:
940 precursors.setdefault(newnode, []).append(oldnode)
940 precursors.setdefault(newnode, []).append(oldnode)
941
941
942 allnewnodes.sort(key=lambda n: unfi[n].rev())
942 allnewnodes.sort(key=lambda n: unfi[n].rev())
943 newphases = {}
943 newphases = {}
944 def phase(ctx):
944 def phase(ctx):
945 return newphases.get(ctx.node(), ctx.phase())
945 return newphases.get(ctx.node(), ctx.phase())
946 for newnode in allnewnodes:
946 for newnode in allnewnodes:
947 ctx = unfi[newnode]
947 ctx = unfi[newnode]
948 parentphase = max(phase(p) for p in ctx.parents())
948 parentphase = max(phase(p) for p in ctx.parents())
949 if targetphase is None:
949 if targetphase is None:
950 oldphase = max(unfi[oldnode].phase()
950 oldphase = max(unfi[oldnode].phase()
951 for oldnode in precursors[newnode])
951 for oldnode in precursors[newnode])
952 newphase = max(oldphase, parentphase)
952 newphase = max(oldphase, parentphase)
953 else:
953 else:
954 newphase = max(targetphase, parentphase)
954 newphase = max(targetphase, parentphase)
955 newphases[newnode] = newphase
955 newphases[newnode] = newphase
956 if newphase > ctx.phase():
956 if newphase > ctx.phase():
957 toretract.setdefault(newphase, []).append(newnode)
957 toretract.setdefault(newphase, []).append(newnode)
958 elif newphase < ctx.phase():
958 elif newphase < ctx.phase():
959 toadvance.setdefault(newphase, []).append(newnode)
959 toadvance.setdefault(newphase, []).append(newnode)
960
960
961 with repo.transaction('cleanup') as tr:
961 with repo.transaction('cleanup') as tr:
962 # Move bookmarks
962 # Move bookmarks
963 bmarks = repo._bookmarks
963 bmarks = repo._bookmarks
964 bmarkchanges = []
964 bmarkchanges = []
965 for oldnode, newnode in moves.items():
965 for oldnode, newnode in moves.items():
966 oldbmarks = repo.nodebookmarks(oldnode)
966 oldbmarks = repo.nodebookmarks(oldnode)
967 if not oldbmarks:
967 if not oldbmarks:
968 continue
968 continue
969 from . import bookmarks # avoid import cycle
969 from . import bookmarks # avoid import cycle
970 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
970 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
971 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
971 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
972 hex(oldnode), hex(newnode)))
972 hex(oldnode), hex(newnode)))
973 # Delete divergent bookmarks being parents of related newnodes
973 # Delete divergent bookmarks being parents of related newnodes
974 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
974 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
975 allnewnodes, newnode, oldnode)
975 allnewnodes, newnode, oldnode)
976 deletenodes = _containsnode(repo, deleterevs)
976 deletenodes = _containsnode(repo, deleterevs)
977 for name in oldbmarks:
977 for name in oldbmarks:
978 bmarkchanges.append((name, newnode))
978 bmarkchanges.append((name, newnode))
979 for b in bookmarks.divergent2delete(repo, deletenodes, name):
979 for b in bookmarks.divergent2delete(repo, deletenodes, name):
980 bmarkchanges.append((b, None))
980 bmarkchanges.append((b, None))
981
981
982 if bmarkchanges:
982 if bmarkchanges:
983 bmarks.applychanges(repo, tr, bmarkchanges)
983 bmarks.applychanges(repo, tr, bmarkchanges)
984
984
985 for phase, nodes in toretract.items():
985 for phase, nodes in toretract.items():
986 phases.retractboundary(repo, tr, phase, nodes)
986 phases.retractboundary(repo, tr, phase, nodes)
987 for phase, nodes in toadvance.items():
987 for phase, nodes in toadvance.items():
988 phases.advanceboundary(repo, tr, phase, nodes)
988 phases.advanceboundary(repo, tr, phase, nodes)
989
989
990 # Obsolete or strip nodes
990 # Obsolete or strip nodes
991 if obsolete.isenabled(repo, obsolete.createmarkersopt):
991 if obsolete.isenabled(repo, obsolete.createmarkersopt):
992 # If a node is already obsoleted, and we want to obsolete it
992 # If a node is already obsoleted, and we want to obsolete it
993 # without a successor, skip that obssolete request since it's
993 # without a successor, skip that obssolete request since it's
994 # unnecessary. That's the "if s or not isobs(n)" check below.
994 # unnecessary. That's the "if s or not isobs(n)" check below.
995 # Also sort the node in topology order, that might be useful for
995 # Also sort the node in topology order, that might be useful for
996 # some obsstore logic.
996 # some obsstore logic.
997 # NOTE: the sorting might belong to createmarkers.
997 # NOTE: the sorting might belong to createmarkers.
998 torev = unfi.changelog.rev
998 torev = unfi.changelog.rev
999 sortfunc = lambda ns: torev(ns[0][0])
999 sortfunc = lambda ns: torev(ns[0][0])
1000 rels = []
1000 rels = []
1001 for ns, s in sorted(replacements.items(), key=sortfunc):
1001 for ns, s in sorted(replacements.items(), key=sortfunc):
1002 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1002 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1003 rels.append(rel)
1003 rels.append(rel)
1004 if rels:
1004 if rels:
1005 obsolete.createmarkers(repo, rels, operation=operation,
1005 obsolete.createmarkers(repo, rels, operation=operation,
1006 metadata=metadata)
1006 metadata=metadata)
1007 else:
1007 else:
1008 from . import repair # avoid import cycle
1008 from . import repair # avoid import cycle
1009 tostrip = list(n for ns in replacements for n in ns)
1009 tostrip = list(n for ns in replacements for n in ns)
1010 if tostrip:
1010 if tostrip:
1011 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1011 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1012 backup=backup)
1012 backup=backup)
1013
1013
1014 def addremove(repo, matcher, prefix, opts=None):
1014 def addremove(repo, matcher, prefix, opts=None):
1015 if opts is None:
1015 if opts is None:
1016 opts = {}
1016 opts = {}
1017 m = matcher
1017 m = matcher
1018 dry_run = opts.get('dry_run')
1018 dry_run = opts.get('dry_run')
1019 try:
1019 try:
1020 similarity = float(opts.get('similarity') or 0)
1020 similarity = float(opts.get('similarity') or 0)
1021 except ValueError:
1021 except ValueError:
1022 raise error.Abort(_('similarity must be a number'))
1022 raise error.Abort(_('similarity must be a number'))
1023 if similarity < 0 or similarity > 100:
1023 if similarity < 0 or similarity > 100:
1024 raise error.Abort(_('similarity must be between 0 and 100'))
1024 raise error.Abort(_('similarity must be between 0 and 100'))
1025 similarity /= 100.0
1025 similarity /= 100.0
1026
1026
1027 ret = 0
1027 ret = 0
1028 join = lambda f: os.path.join(prefix, f)
1028 join = lambda f: os.path.join(prefix, f)
1029
1029
1030 wctx = repo[None]
1030 wctx = repo[None]
1031 for subpath in sorted(wctx.substate):
1031 for subpath in sorted(wctx.substate):
1032 submatch = matchmod.subdirmatcher(subpath, m)
1032 submatch = matchmod.subdirmatcher(subpath, m)
1033 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1033 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1034 sub = wctx.sub(subpath)
1034 sub = wctx.sub(subpath)
1035 try:
1035 try:
1036 if sub.addremove(submatch, prefix, opts):
1036 if sub.addremove(submatch, prefix, opts):
1037 ret = 1
1037 ret = 1
1038 except error.LookupError:
1038 except error.LookupError:
1039 repo.ui.status(_("skipping missing subrepository: %s\n")
1039 repo.ui.status(_("skipping missing subrepository: %s\n")
1040 % join(subpath))
1040 % join(subpath))
1041
1041
1042 rejected = []
1042 rejected = []
1043 def badfn(f, msg):
1043 def badfn(f, msg):
1044 if f in m.files():
1044 if f in m.files():
1045 m.bad(f, msg)
1045 m.bad(f, msg)
1046 rejected.append(f)
1046 rejected.append(f)
1047
1047
1048 badmatch = matchmod.badmatch(m, badfn)
1048 badmatch = matchmod.badmatch(m, badfn)
1049 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1049 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1050 badmatch)
1050 badmatch)
1051
1051
1052 unknownset = set(unknown + forgotten)
1052 unknownset = set(unknown + forgotten)
1053 toprint = unknownset.copy()
1053 toprint = unknownset.copy()
1054 toprint.update(deleted)
1054 toprint.update(deleted)
1055 for abs in sorted(toprint):
1055 for abs in sorted(toprint):
1056 if repo.ui.verbose or not m.exact(abs):
1056 if repo.ui.verbose or not m.exact(abs):
1057 if abs in unknownset:
1057 if abs in unknownset:
1058 status = _('adding %s\n') % m.uipath(abs)
1058 status = _('adding %s\n') % m.uipath(abs)
1059 label = 'ui.addremove.added'
1059 label = 'ui.addremove.added'
1060 else:
1060 else:
1061 status = _('removing %s\n') % m.uipath(abs)
1061 status = _('removing %s\n') % m.uipath(abs)
1062 label = 'ui.addremove.removed'
1062 label = 'ui.addremove.removed'
1063 repo.ui.status(status, label=label)
1063 repo.ui.status(status, label=label)
1064
1064
1065 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1065 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1066 similarity)
1066 similarity)
1067
1067
1068 if not dry_run:
1068 if not dry_run:
1069 _markchanges(repo, unknown + forgotten, deleted, renames)
1069 _markchanges(repo, unknown + forgotten, deleted, renames)
1070
1070
1071 for f in rejected:
1071 for f in rejected:
1072 if f in m.files():
1072 if f in m.files():
1073 return 1
1073 return 1
1074 return ret
1074 return ret
1075
1075
1076 def marktouched(repo, files, similarity=0.0):
1076 def marktouched(repo, files, similarity=0.0):
1077 '''Assert that files have somehow been operated upon. files are relative to
1077 '''Assert that files have somehow been operated upon. files are relative to
1078 the repo root.'''
1078 the repo root.'''
1079 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1079 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1080 rejected = []
1080 rejected = []
1081
1081
1082 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1082 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1083
1083
1084 if repo.ui.verbose:
1084 if repo.ui.verbose:
1085 unknownset = set(unknown + forgotten)
1085 unknownset = set(unknown + forgotten)
1086 toprint = unknownset.copy()
1086 toprint = unknownset.copy()
1087 toprint.update(deleted)
1087 toprint.update(deleted)
1088 for abs in sorted(toprint):
1088 for abs in sorted(toprint):
1089 if abs in unknownset:
1089 if abs in unknownset:
1090 status = _('adding %s\n') % abs
1090 status = _('adding %s\n') % abs
1091 else:
1091 else:
1092 status = _('removing %s\n') % abs
1092 status = _('removing %s\n') % abs
1093 repo.ui.status(status)
1093 repo.ui.status(status)
1094
1094
1095 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1095 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1096 similarity)
1096 similarity)
1097
1097
1098 _markchanges(repo, unknown + forgotten, deleted, renames)
1098 _markchanges(repo, unknown + forgotten, deleted, renames)
1099
1099
1100 for f in rejected:
1100 for f in rejected:
1101 if f in m.files():
1101 if f in m.files():
1102 return 1
1102 return 1
1103 return 0
1103 return 0
1104
1104
1105 def _interestingfiles(repo, matcher):
1105 def _interestingfiles(repo, matcher):
1106 '''Walk dirstate with matcher, looking for files that addremove would care
1106 '''Walk dirstate with matcher, looking for files that addremove would care
1107 about.
1107 about.
1108
1108
1109 This is different from dirstate.status because it doesn't care about
1109 This is different from dirstate.status because it doesn't care about
1110 whether files are modified or clean.'''
1110 whether files are modified or clean.'''
1111 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1111 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1112 audit_path = pathutil.pathauditor(repo.root, cached=True)
1112 audit_path = pathutil.pathauditor(repo.root, cached=True)
1113
1113
1114 ctx = repo[None]
1114 ctx = repo[None]
1115 dirstate = repo.dirstate
1115 dirstate = repo.dirstate
1116 matcher = repo.narrowmatch(matcher, includeexact=True)
1116 matcher = repo.narrowmatch(matcher, includeexact=True)
1117 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1117 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1118 unknown=True, ignored=False, full=False)
1118 unknown=True, ignored=False, full=False)
1119 for abs, st in walkresults.iteritems():
1119 for abs, st in walkresults.iteritems():
1120 dstate = dirstate[abs]
1120 dstate = dirstate[abs]
1121 if dstate == '?' and audit_path.check(abs):
1121 if dstate == '?' and audit_path.check(abs):
1122 unknown.append(abs)
1122 unknown.append(abs)
1123 elif dstate != 'r' and not st:
1123 elif dstate != 'r' and not st:
1124 deleted.append(abs)
1124 deleted.append(abs)
1125 elif dstate == 'r' and st:
1125 elif dstate == 'r' and st:
1126 forgotten.append(abs)
1126 forgotten.append(abs)
1127 # for finding renames
1127 # for finding renames
1128 elif dstate == 'r' and not st:
1128 elif dstate == 'r' and not st:
1129 removed.append(abs)
1129 removed.append(abs)
1130 elif dstate == 'a':
1130 elif dstate == 'a':
1131 added.append(abs)
1131 added.append(abs)
1132
1132
1133 return added, unknown, deleted, removed, forgotten
1133 return added, unknown, deleted, removed, forgotten
1134
1134
1135 def _findrenames(repo, matcher, added, removed, similarity):
1135 def _findrenames(repo, matcher, added, removed, similarity):
1136 '''Find renames from removed files to added ones.'''
1136 '''Find renames from removed files to added ones.'''
1137 renames = {}
1137 renames = {}
1138 if similarity > 0:
1138 if similarity > 0:
1139 for old, new, score in similar.findrenames(repo, added, removed,
1139 for old, new, score in similar.findrenames(repo, added, removed,
1140 similarity):
1140 similarity):
1141 if (repo.ui.verbose or not matcher.exact(old)
1141 if (repo.ui.verbose or not matcher.exact(old)
1142 or not matcher.exact(new)):
1142 or not matcher.exact(new)):
1143 repo.ui.status(_('recording removal of %s as rename to %s '
1143 repo.ui.status(_('recording removal of %s as rename to %s '
1144 '(%d%% similar)\n') %
1144 '(%d%% similar)\n') %
1145 (matcher.rel(old), matcher.rel(new),
1145 (matcher.rel(old), matcher.rel(new),
1146 score * 100))
1146 score * 100))
1147 renames[new] = old
1147 renames[new] = old
1148 return renames
1148 return renames
1149
1149
1150 def _markchanges(repo, unknown, deleted, renames):
1150 def _markchanges(repo, unknown, deleted, renames):
1151 '''Marks the files in unknown as added, the files in deleted as removed,
1151 '''Marks the files in unknown as added, the files in deleted as removed,
1152 and the files in renames as copied.'''
1152 and the files in renames as copied.'''
1153 wctx = repo[None]
1153 wctx = repo[None]
1154 with repo.wlock():
1154 with repo.wlock():
1155 wctx.forget(deleted)
1155 wctx.forget(deleted)
1156 wctx.add(unknown)
1156 wctx.add(unknown)
1157 for new, old in renames.iteritems():
1157 for new, old in renames.iteritems():
1158 wctx.copy(old, new)
1158 wctx.copy(old, new)
1159
1159
1160 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1160 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1161 """Update the dirstate to reflect the intent of copying src to dst. For
1161 """Update the dirstate to reflect the intent of copying src to dst. For
1162 different reasons it might not end with dst being marked as copied from src.
1162 different reasons it might not end with dst being marked as copied from src.
1163 """
1163 """
1164 origsrc = repo.dirstate.copied(src) or src
1164 origsrc = repo.dirstate.copied(src) or src
1165 if dst == origsrc: # copying back a copy?
1165 if dst == origsrc: # copying back a copy?
1166 if repo.dirstate[dst] not in 'mn' and not dryrun:
1166 if repo.dirstate[dst] not in 'mn' and not dryrun:
1167 repo.dirstate.normallookup(dst)
1167 repo.dirstate.normallookup(dst)
1168 else:
1168 else:
1169 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1169 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1170 if not ui.quiet:
1170 if not ui.quiet:
1171 ui.warn(_("%s has not been committed yet, so no copy "
1171 ui.warn(_("%s has not been committed yet, so no copy "
1172 "data will be stored for %s.\n")
1172 "data will be stored for %s.\n")
1173 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1173 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1174 if repo.dirstate[dst] in '?r' and not dryrun:
1174 if repo.dirstate[dst] in '?r' and not dryrun:
1175 wctx.add([dst])
1175 wctx.add([dst])
1176 elif not dryrun:
1176 elif not dryrun:
1177 wctx.copy(origsrc, dst)
1177 wctx.copy(origsrc, dst)
1178
1178
1179 def writerequires(opener, requirements):
1179 def writerequires(opener, requirements):
1180 with opener('requires', 'w', atomictemp=True) as fp:
1180 with opener('requires', 'w', atomictemp=True) as fp:
1181 for r in sorted(requirements):
1181 for r in sorted(requirements):
1182 fp.write("%s\n" % r)
1182 fp.write("%s\n" % r)
1183
1183
1184 class filecachesubentry(object):
1184 class filecachesubentry(object):
1185 def __init__(self, path, stat):
1185 def __init__(self, path, stat):
1186 self.path = path
1186 self.path = path
1187 self.cachestat = None
1187 self.cachestat = None
1188 self._cacheable = None
1188 self._cacheable = None
1189
1189
1190 if stat:
1190 if stat:
1191 self.cachestat = filecachesubentry.stat(self.path)
1191 self.cachestat = filecachesubentry.stat(self.path)
1192
1192
1193 if self.cachestat:
1193 if self.cachestat:
1194 self._cacheable = self.cachestat.cacheable()
1194 self._cacheable = self.cachestat.cacheable()
1195 else:
1195 else:
1196 # None means we don't know yet
1196 # None means we don't know yet
1197 self._cacheable = None
1197 self._cacheable = None
1198
1198
1199 def refresh(self):
1199 def refresh(self):
1200 if self.cacheable():
1200 if self.cacheable():
1201 self.cachestat = filecachesubentry.stat(self.path)
1201 self.cachestat = filecachesubentry.stat(self.path)
1202
1202
1203 def cacheable(self):
1203 def cacheable(self):
1204 if self._cacheable is not None:
1204 if self._cacheable is not None:
1205 return self._cacheable
1205 return self._cacheable
1206
1206
1207 # we don't know yet, assume it is for now
1207 # we don't know yet, assume it is for now
1208 return True
1208 return True
1209
1209
1210 def changed(self):
1210 def changed(self):
1211 # no point in going further if we can't cache it
1211 # no point in going further if we can't cache it
1212 if not self.cacheable():
1212 if not self.cacheable():
1213 return True
1213 return True
1214
1214
1215 newstat = filecachesubentry.stat(self.path)
1215 newstat = filecachesubentry.stat(self.path)
1216
1216
1217 # we may not know if it's cacheable yet, check again now
1217 # we may not know if it's cacheable yet, check again now
1218 if newstat and self._cacheable is None:
1218 if newstat and self._cacheable is None:
1219 self._cacheable = newstat.cacheable()
1219 self._cacheable = newstat.cacheable()
1220
1220
1221 # check again
1221 # check again
1222 if not self._cacheable:
1222 if not self._cacheable:
1223 return True
1223 return True
1224
1224
1225 if self.cachestat != newstat:
1225 if self.cachestat != newstat:
1226 self.cachestat = newstat
1226 self.cachestat = newstat
1227 return True
1227 return True
1228 else:
1228 else:
1229 return False
1229 return False
1230
1230
1231 @staticmethod
1231 @staticmethod
1232 def stat(path):
1232 def stat(path):
1233 try:
1233 try:
1234 return util.cachestat(path)
1234 return util.cachestat(path)
1235 except OSError as e:
1235 except OSError as e:
1236 if e.errno != errno.ENOENT:
1236 if e.errno != errno.ENOENT:
1237 raise
1237 raise
1238
1238
1239 class filecacheentry(object):
1239 class filecacheentry(object):
1240 def __init__(self, paths, stat=True):
1240 def __init__(self, paths, stat=True):
1241 self._entries = []
1241 self._entries = []
1242 for path in paths:
1242 for path in paths:
1243 self._entries.append(filecachesubentry(path, stat))
1243 self._entries.append(filecachesubentry(path, stat))
1244
1244
1245 def changed(self):
1245 def changed(self):
1246 '''true if any entry has changed'''
1246 '''true if any entry has changed'''
1247 for entry in self._entries:
1247 for entry in self._entries:
1248 if entry.changed():
1248 if entry.changed():
1249 return True
1249 return True
1250 return False
1250 return False
1251
1251
1252 def refresh(self):
1252 def refresh(self):
1253 for entry in self._entries:
1253 for entry in self._entries:
1254 entry.refresh()
1254 entry.refresh()
1255
1255
1256 class filecache(object):
1256 class filecache(object):
1257 """A property like decorator that tracks files under .hg/ for updates.
1257 """A property like decorator that tracks files under .hg/ for updates.
1258
1258
1259 On first access, the files defined as arguments are stat()ed and the
1259 On first access, the files defined as arguments are stat()ed and the
1260 results cached. The decorated function is called. The results are stashed
1260 results cached. The decorated function is called. The results are stashed
1261 away in a ``_filecache`` dict on the object whose method is decorated.
1261 away in a ``_filecache`` dict on the object whose method is decorated.
1262
1262
1263 On subsequent access, the cached result is used as it is set to the
1263 On subsequent access, the cached result is used as it is set to the
1264 instance dictionary.
1264 instance dictionary.
1265
1265
1266 On external property set/delete operations, the caller must update the
1266 On external property set/delete operations, the caller must update the
1267 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1267 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1268 instead of directly setting <attr>.
1268 instead of directly setting <attr>.
1269
1269
1270 When using the property API, the cached data is always used if available.
1270 When using the property API, the cached data is always used if available.
1271 No stat() is performed to check if the file has changed.
1271 No stat() is performed to check if the file has changed.
1272
1272
1273 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1273 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1274 can populate an entry before the property's getter is called. In this case,
1274 can populate an entry before the property's getter is called. In this case,
1275 entries in ``_filecache`` will be used during property operations,
1275 entries in ``_filecache`` will be used during property operations,
1276 if available. If the underlying file changes, it is up to external callers
1276 if available. If the underlying file changes, it is up to external callers
1277 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1277 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1278 method result as well as possibly calling ``del obj._filecache[attr]`` to
1278 method result as well as possibly calling ``del obj._filecache[attr]`` to
1279 remove the ``filecacheentry``.
1279 remove the ``filecacheentry``.
1280 """
1280 """
1281
1281
1282 def __init__(self, *paths):
1282 def __init__(self, *paths):
1283 self.paths = paths
1283 self.paths = paths
1284
1284
1285 def join(self, obj, fname):
1285 def join(self, obj, fname):
1286 """Used to compute the runtime path of a cached file.
1286 """Used to compute the runtime path of a cached file.
1287
1287
1288 Users should subclass filecache and provide their own version of this
1288 Users should subclass filecache and provide their own version of this
1289 function to call the appropriate join function on 'obj' (an instance
1289 function to call the appropriate join function on 'obj' (an instance
1290 of the class that its member function was decorated).
1290 of the class that its member function was decorated).
1291 """
1291 """
1292 raise NotImplementedError
1292 raise NotImplementedError
1293
1293
1294 def __call__(self, func):
1294 def __call__(self, func):
1295 self.func = func
1295 self.func = func
1296 self.sname = func.__name__
1296 self.sname = func.__name__
1297 self.name = pycompat.sysbytes(self.sname)
1297 self.name = pycompat.sysbytes(self.sname)
1298 return self
1298 return self
1299
1299
1300 def __get__(self, obj, type=None):
1300 def __get__(self, obj, type=None):
1301 # if accessed on the class, return the descriptor itself.
1301 # if accessed on the class, return the descriptor itself.
1302 if obj is None:
1302 if obj is None:
1303 return self
1303 return self
1304
1304
1305 assert self.sname not in obj.__dict__
1305 assert self.sname not in obj.__dict__
1306
1306
1307 entry = obj._filecache.get(self.name)
1307 entry = obj._filecache.get(self.name)
1308
1308
1309 if entry:
1309 if entry:
1310 if entry.changed():
1310 if entry.changed():
1311 entry.obj = self.func(obj)
1311 entry.obj = self.func(obj)
1312 else:
1312 else:
1313 paths = [self.join(obj, path) for path in self.paths]
1313 paths = [self.join(obj, path) for path in self.paths]
1314
1314
1315 # We stat -before- creating the object so our cache doesn't lie if
1315 # We stat -before- creating the object so our cache doesn't lie if
1316 # a writer modified between the time we read and stat
1316 # a writer modified between the time we read and stat
1317 entry = filecacheentry(paths, True)
1317 entry = filecacheentry(paths, True)
1318 entry.obj = self.func(obj)
1318 entry.obj = self.func(obj)
1319
1319
1320 obj._filecache[self.name] = entry
1320 obj._filecache[self.name] = entry
1321
1321
1322 obj.__dict__[self.sname] = entry.obj
1322 obj.__dict__[self.sname] = entry.obj
1323 return entry.obj
1323 return entry.obj
1324
1324
1325 # don't implement __set__(), which would make __dict__ lookup as slow as
1325 # don't implement __set__(), which would make __dict__ lookup as slow as
1326 # function call.
1326 # function call.
1327
1327
1328 def set(self, obj, value):
1328 def set(self, obj, value):
1329 if self.name not in obj._filecache:
1329 if self.name not in obj._filecache:
1330 # we add an entry for the missing value because X in __dict__
1330 # we add an entry for the missing value because X in __dict__
1331 # implies X in _filecache
1331 # implies X in _filecache
1332 paths = [self.join(obj, path) for path in self.paths]
1332 paths = [self.join(obj, path) for path in self.paths]
1333 ce = filecacheentry(paths, False)
1333 ce = filecacheentry(paths, False)
1334 obj._filecache[self.name] = ce
1334 obj._filecache[self.name] = ce
1335 else:
1335 else:
1336 ce = obj._filecache[self.name]
1336 ce = obj._filecache[self.name]
1337
1337
1338 ce.obj = value # update cached copy
1338 ce.obj = value # update cached copy
1339 obj.__dict__[self.sname] = value # update copy returned by obj.x
1339 obj.__dict__[self.sname] = value # update copy returned by obj.x
1340
1340
1341 def extdatasource(repo, source):
1341 def extdatasource(repo, source):
1342 """Gather a map of rev -> value dict from the specified source
1342 """Gather a map of rev -> value dict from the specified source
1343
1343
1344 A source spec is treated as a URL, with a special case shell: type
1344 A source spec is treated as a URL, with a special case shell: type
1345 for parsing the output from a shell command.
1345 for parsing the output from a shell command.
1346
1346
1347 The data is parsed as a series of newline-separated records where
1347 The data is parsed as a series of newline-separated records where
1348 each record is a revision specifier optionally followed by a space
1348 each record is a revision specifier optionally followed by a space
1349 and a freeform string value. If the revision is known locally, it
1349 and a freeform string value. If the revision is known locally, it
1350 is converted to a rev, otherwise the record is skipped.
1350 is converted to a rev, otherwise the record is skipped.
1351
1351
1352 Note that both key and value are treated as UTF-8 and converted to
1352 Note that both key and value are treated as UTF-8 and converted to
1353 the local encoding. This allows uniformity between local and
1353 the local encoding. This allows uniformity between local and
1354 remote data sources.
1354 remote data sources.
1355 """
1355 """
1356
1356
1357 spec = repo.ui.config("extdata", source)
1357 spec = repo.ui.config("extdata", source)
1358 if not spec:
1358 if not spec:
1359 raise error.Abort(_("unknown extdata source '%s'") % source)
1359 raise error.Abort(_("unknown extdata source '%s'") % source)
1360
1360
1361 data = {}
1361 data = {}
1362 src = proc = None
1362 src = proc = None
1363 try:
1363 try:
1364 if spec.startswith("shell:"):
1364 if spec.startswith("shell:"):
1365 # external commands should be run relative to the repo root
1365 # external commands should be run relative to the repo root
1366 cmd = spec[6:]
1366 cmd = spec[6:]
1367 proc = subprocess.Popen(procutil.tonativestr(cmd),
1367 proc = subprocess.Popen(procutil.tonativestr(cmd),
1368 shell=True, bufsize=-1,
1368 shell=True, bufsize=-1,
1369 close_fds=procutil.closefds,
1369 close_fds=procutil.closefds,
1370 stdout=subprocess.PIPE,
1370 stdout=subprocess.PIPE,
1371 cwd=procutil.tonativestr(repo.root))
1371 cwd=procutil.tonativestr(repo.root))
1372 src = proc.stdout
1372 src = proc.stdout
1373 else:
1373 else:
1374 # treat as a URL or file
1374 # treat as a URL or file
1375 src = url.open(repo.ui, spec)
1375 src = url.open(repo.ui, spec)
1376 for l in src:
1376 for l in src:
1377 if " " in l:
1377 if " " in l:
1378 k, v = l.strip().split(" ", 1)
1378 k, v = l.strip().split(" ", 1)
1379 else:
1379 else:
1380 k, v = l.strip(), ""
1380 k, v = l.strip(), ""
1381
1381
1382 k = encoding.tolocal(k)
1382 k = encoding.tolocal(k)
1383 try:
1383 try:
1384 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1384 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1385 except (error.LookupError, error.RepoLookupError):
1385 except (error.LookupError, error.RepoLookupError):
1386 pass # we ignore data for nodes that don't exist locally
1386 pass # we ignore data for nodes that don't exist locally
1387 finally:
1387 finally:
1388 if proc:
1388 if proc:
1389 proc.communicate()
1389 proc.communicate()
1390 if src:
1390 if src:
1391 src.close()
1391 src.close()
1392 if proc and proc.returncode != 0:
1392 if proc and proc.returncode != 0:
1393 raise error.Abort(_("extdata command '%s' failed: %s")
1393 raise error.Abort(_("extdata command '%s' failed: %s")
1394 % (cmd, procutil.explainexit(proc.returncode)))
1394 % (cmd, procutil.explainexit(proc.returncode)))
1395
1395
1396 return data
1396 return data
1397
1397
1398 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1398 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1399 if lock is None:
1399 if lock is None:
1400 raise error.LockInheritanceContractViolation(
1400 raise error.LockInheritanceContractViolation(
1401 'lock can only be inherited while held')
1401 'lock can only be inherited while held')
1402 if environ is None:
1402 if environ is None:
1403 environ = {}
1403 environ = {}
1404 with lock.inherit() as locker:
1404 with lock.inherit() as locker:
1405 environ[envvar] = locker
1405 environ[envvar] = locker
1406 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1406 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1407
1407
1408 def wlocksub(repo, cmd, *args, **kwargs):
1408 def wlocksub(repo, cmd, *args, **kwargs):
1409 """run cmd as a subprocess that allows inheriting repo's wlock
1409 """run cmd as a subprocess that allows inheriting repo's wlock
1410
1410
1411 This can only be called while the wlock is held. This takes all the
1411 This can only be called while the wlock is held. This takes all the
1412 arguments that ui.system does, and returns the exit code of the
1412 arguments that ui.system does, and returns the exit code of the
1413 subprocess."""
1413 subprocess."""
1414 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1414 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1415 **kwargs)
1415 **kwargs)
1416
1416
1417 class progress(object):
1417 class progress(object):
1418 def __init__(self, ui, topic, unit="", total=None):
1418 def __init__(self, ui, topic, unit="", total=None):
1419 self.ui = ui
1419 self.ui = ui
1420 self.pos = 0
1420 self.pos = 0
1421 self.topic = topic
1421 self.topic = topic
1422 self.unit = unit
1422 self.unit = unit
1423 self.total = total
1423 self.total = total
1424 self.debug = ui.configbool('progress', 'debug')
1424 self.debug = ui.configbool('progress', 'debug')
1425
1425
1426 def __enter__(self):
1426 def __enter__(self):
1427 return self
1427 return self
1428
1428
1429 def __exit__(self, exc_type, exc_value, exc_tb):
1429 def __exit__(self, exc_type, exc_value, exc_tb):
1430 self.complete()
1430 self.complete()
1431
1431
1432 def update(self, pos, item="", total=None):
1432 def update(self, pos, item="", total=None):
1433 assert pos is not None
1433 assert pos is not None
1434 if total:
1434 if total:
1435 self.total = total
1435 self.total = total
1436 self.pos = pos
1436 self.pos = pos
1437 self._print(item)
1437 self._updatebar(item)
1438 if self.debug:
1439 self._printdebug(item)
1438
1440
1439 def increment(self, step=1, item="", total=None):
1441 def increment(self, step=1, item="", total=None):
1440 self.update(self.pos + step, item, total)
1442 self.update(self.pos + step, item, total)
1441
1443
1442 def complete(self):
1444 def complete(self):
1443 self.pos = None
1445 self.pos = None
1444 self.unit = ""
1446 self.unit = ""
1445 self.total = None
1447 self.total = None
1446 self._print("")
1448 self._updatebar("")
1447
1449
1448 def _print(self, item):
1450 def _updatebar(self, item):
1449 if getattr(self.ui._fmsgerr, 'structured', False):
1451 if getattr(self.ui._fmsgerr, 'structured', False):
1450 # channel for machine-readable output with metadata, just send
1452 # channel for machine-readable output with metadata, just send
1451 # raw information
1453 # raw information
1452 # TODO: consider porting some useful information (e.g. estimated
1454 # TODO: consider porting some useful information (e.g. estimated
1453 # time) from progbar. we might want to support update delay to
1455 # time) from progbar. we might want to support update delay to
1454 # reduce the cost of transferring progress messages.
1456 # reduce the cost of transferring progress messages.
1455 self.ui._fmsgerr.write(None, type=b'progress', topic=self.topic,
1457 self.ui._fmsgerr.write(None, type=b'progress', topic=self.topic,
1456 pos=self.pos, item=item, unit=self.unit,
1458 pos=self.pos, item=item, unit=self.unit,
1457 total=self.total)
1459 total=self.total)
1458 elif self.ui._progbar is not None:
1460 elif self.ui._progbar is not None:
1459 self.ui._progbar.progress(self.topic, self.pos, item=item,
1461 self.ui._progbar.progress(self.topic, self.pos, item=item,
1460 unit=self.unit, total=self.total)
1462 unit=self.unit, total=self.total)
1461
1463
1462 if self.pos is None or not self.debug:
1464 def _printdebug(self, item):
1463 return
1464
1465 if self.unit:
1465 if self.unit:
1466 unit = ' ' + self.unit
1466 unit = ' ' + self.unit
1467 if item:
1467 if item:
1468 item = ' ' + item
1468 item = ' ' + item
1469
1469
1470 if self.total:
1470 if self.total:
1471 pct = 100.0 * self.pos / self.total
1471 pct = 100.0 * self.pos / self.total
1472 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1472 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1473 % (self.topic, item, self.pos, self.total, unit, pct))
1473 % (self.topic, item, self.pos, self.total, unit, pct))
1474 else:
1474 else:
1475 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1475 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1476
1476
1477 def gdinitconfig(ui):
1477 def gdinitconfig(ui):
1478 """helper function to know if a repo should be created as general delta
1478 """helper function to know if a repo should be created as general delta
1479 """
1479 """
1480 # experimental config: format.generaldelta
1480 # experimental config: format.generaldelta
1481 return (ui.configbool('format', 'generaldelta')
1481 return (ui.configbool('format', 'generaldelta')
1482 or ui.configbool('format', 'usegeneraldelta'))
1482 or ui.configbool('format', 'usegeneraldelta'))
1483
1483
1484 def gddeltaconfig(ui):
1484 def gddeltaconfig(ui):
1485 """helper function to know if incoming delta should be optimised
1485 """helper function to know if incoming delta should be optimised
1486 """
1486 """
1487 # experimental config: format.generaldelta
1487 # experimental config: format.generaldelta
1488 return ui.configbool('format', 'generaldelta')
1488 return ui.configbool('format', 'generaldelta')
1489
1489
1490 class simplekeyvaluefile(object):
1490 class simplekeyvaluefile(object):
1491 """A simple file with key=value lines
1491 """A simple file with key=value lines
1492
1492
1493 Keys must be alphanumerics and start with a letter, values must not
1493 Keys must be alphanumerics and start with a letter, values must not
1494 contain '\n' characters"""
1494 contain '\n' characters"""
1495 firstlinekey = '__firstline'
1495 firstlinekey = '__firstline'
1496
1496
1497 def __init__(self, vfs, path, keys=None):
1497 def __init__(self, vfs, path, keys=None):
1498 self.vfs = vfs
1498 self.vfs = vfs
1499 self.path = path
1499 self.path = path
1500
1500
1501 def read(self, firstlinenonkeyval=False):
1501 def read(self, firstlinenonkeyval=False):
1502 """Read the contents of a simple key-value file
1502 """Read the contents of a simple key-value file
1503
1503
1504 'firstlinenonkeyval' indicates whether the first line of file should
1504 'firstlinenonkeyval' indicates whether the first line of file should
1505 be treated as a key-value pair or reuturned fully under the
1505 be treated as a key-value pair or reuturned fully under the
1506 __firstline key."""
1506 __firstline key."""
1507 lines = self.vfs.readlines(self.path)
1507 lines = self.vfs.readlines(self.path)
1508 d = {}
1508 d = {}
1509 if firstlinenonkeyval:
1509 if firstlinenonkeyval:
1510 if not lines:
1510 if not lines:
1511 e = _("empty simplekeyvalue file")
1511 e = _("empty simplekeyvalue file")
1512 raise error.CorruptedState(e)
1512 raise error.CorruptedState(e)
1513 # we don't want to include '\n' in the __firstline
1513 # we don't want to include '\n' in the __firstline
1514 d[self.firstlinekey] = lines[0][:-1]
1514 d[self.firstlinekey] = lines[0][:-1]
1515 del lines[0]
1515 del lines[0]
1516
1516
1517 try:
1517 try:
1518 # the 'if line.strip()' part prevents us from failing on empty
1518 # the 'if line.strip()' part prevents us from failing on empty
1519 # lines which only contain '\n' therefore are not skipped
1519 # lines which only contain '\n' therefore are not skipped
1520 # by 'if line'
1520 # by 'if line'
1521 updatedict = dict(line[:-1].split('=', 1) for line in lines
1521 updatedict = dict(line[:-1].split('=', 1) for line in lines
1522 if line.strip())
1522 if line.strip())
1523 if self.firstlinekey in updatedict:
1523 if self.firstlinekey in updatedict:
1524 e = _("%r can't be used as a key")
1524 e = _("%r can't be used as a key")
1525 raise error.CorruptedState(e % self.firstlinekey)
1525 raise error.CorruptedState(e % self.firstlinekey)
1526 d.update(updatedict)
1526 d.update(updatedict)
1527 except ValueError as e:
1527 except ValueError as e:
1528 raise error.CorruptedState(str(e))
1528 raise error.CorruptedState(str(e))
1529 return d
1529 return d
1530
1530
1531 def write(self, data, firstline=None):
1531 def write(self, data, firstline=None):
1532 """Write key=>value mapping to a file
1532 """Write key=>value mapping to a file
1533 data is a dict. Keys must be alphanumerical and start with a letter.
1533 data is a dict. Keys must be alphanumerical and start with a letter.
1534 Values must not contain newline characters.
1534 Values must not contain newline characters.
1535
1535
1536 If 'firstline' is not None, it is written to file before
1536 If 'firstline' is not None, it is written to file before
1537 everything else, as it is, not in a key=value form"""
1537 everything else, as it is, not in a key=value form"""
1538 lines = []
1538 lines = []
1539 if firstline is not None:
1539 if firstline is not None:
1540 lines.append('%s\n' % firstline)
1540 lines.append('%s\n' % firstline)
1541
1541
1542 for k, v in data.items():
1542 for k, v in data.items():
1543 if k == self.firstlinekey:
1543 if k == self.firstlinekey:
1544 e = "key name '%s' is reserved" % self.firstlinekey
1544 e = "key name '%s' is reserved" % self.firstlinekey
1545 raise error.ProgrammingError(e)
1545 raise error.ProgrammingError(e)
1546 if not k[0:1].isalpha():
1546 if not k[0:1].isalpha():
1547 e = "keys must start with a letter in a key-value file"
1547 e = "keys must start with a letter in a key-value file"
1548 raise error.ProgrammingError(e)
1548 raise error.ProgrammingError(e)
1549 if not k.isalnum():
1549 if not k.isalnum():
1550 e = "invalid key name in a simple key-value file"
1550 e = "invalid key name in a simple key-value file"
1551 raise error.ProgrammingError(e)
1551 raise error.ProgrammingError(e)
1552 if '\n' in v:
1552 if '\n' in v:
1553 e = "invalid value in a simple key-value file"
1553 e = "invalid value in a simple key-value file"
1554 raise error.ProgrammingError(e)
1554 raise error.ProgrammingError(e)
1555 lines.append("%s=%s\n" % (k, v))
1555 lines.append("%s=%s\n" % (k, v))
1556 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1556 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1557 fp.write(''.join(lines))
1557 fp.write(''.join(lines))
1558
1558
1559 _reportobsoletedsource = [
1559 _reportobsoletedsource = [
1560 'debugobsolete',
1560 'debugobsolete',
1561 'pull',
1561 'pull',
1562 'push',
1562 'push',
1563 'serve',
1563 'serve',
1564 'unbundle',
1564 'unbundle',
1565 ]
1565 ]
1566
1566
1567 _reportnewcssource = [
1567 _reportnewcssource = [
1568 'pull',
1568 'pull',
1569 'unbundle',
1569 'unbundle',
1570 ]
1570 ]
1571
1571
1572 def prefetchfiles(repo, revs, match):
1572 def prefetchfiles(repo, revs, match):
1573 """Invokes the registered file prefetch functions, allowing extensions to
1573 """Invokes the registered file prefetch functions, allowing extensions to
1574 ensure the corresponding files are available locally, before the command
1574 ensure the corresponding files are available locally, before the command
1575 uses them."""
1575 uses them."""
1576 if match:
1576 if match:
1577 # The command itself will complain about files that don't exist, so
1577 # The command itself will complain about files that don't exist, so
1578 # don't duplicate the message.
1578 # don't duplicate the message.
1579 match = matchmod.badmatch(match, lambda fn, msg: None)
1579 match = matchmod.badmatch(match, lambda fn, msg: None)
1580 else:
1580 else:
1581 match = matchall(repo)
1581 match = matchall(repo)
1582
1582
1583 fileprefetchhooks(repo, revs, match)
1583 fileprefetchhooks(repo, revs, match)
1584
1584
1585 # a list of (repo, revs, match) prefetch functions
1585 # a list of (repo, revs, match) prefetch functions
1586 fileprefetchhooks = util.hooks()
1586 fileprefetchhooks = util.hooks()
1587
1587
1588 # A marker that tells the evolve extension to suppress its own reporting
1588 # A marker that tells the evolve extension to suppress its own reporting
1589 _reportstroubledchangesets = True
1589 _reportstroubledchangesets = True
1590
1590
1591 def registersummarycallback(repo, otr, txnname=''):
1591 def registersummarycallback(repo, otr, txnname=''):
1592 """register a callback to issue a summary after the transaction is closed
1592 """register a callback to issue a summary after the transaction is closed
1593 """
1593 """
1594 def txmatch(sources):
1594 def txmatch(sources):
1595 return any(txnname.startswith(source) for source in sources)
1595 return any(txnname.startswith(source) for source in sources)
1596
1596
1597 categories = []
1597 categories = []
1598
1598
1599 def reportsummary(func):
1599 def reportsummary(func):
1600 """decorator for report callbacks."""
1600 """decorator for report callbacks."""
1601 # The repoview life cycle is shorter than the one of the actual
1601 # The repoview life cycle is shorter than the one of the actual
1602 # underlying repository. So the filtered object can die before the
1602 # underlying repository. So the filtered object can die before the
1603 # weakref is used leading to troubles. We keep a reference to the
1603 # weakref is used leading to troubles. We keep a reference to the
1604 # unfiltered object and restore the filtering when retrieving the
1604 # unfiltered object and restore the filtering when retrieving the
1605 # repository through the weakref.
1605 # repository through the weakref.
1606 filtername = repo.filtername
1606 filtername = repo.filtername
1607 reporef = weakref.ref(repo.unfiltered())
1607 reporef = weakref.ref(repo.unfiltered())
1608 def wrapped(tr):
1608 def wrapped(tr):
1609 repo = reporef()
1609 repo = reporef()
1610 if filtername:
1610 if filtername:
1611 repo = repo.filtered(filtername)
1611 repo = repo.filtered(filtername)
1612 func(repo, tr)
1612 func(repo, tr)
1613 newcat = '%02i-txnreport' % len(categories)
1613 newcat = '%02i-txnreport' % len(categories)
1614 otr.addpostclose(newcat, wrapped)
1614 otr.addpostclose(newcat, wrapped)
1615 categories.append(newcat)
1615 categories.append(newcat)
1616 return wrapped
1616 return wrapped
1617
1617
1618 if txmatch(_reportobsoletedsource):
1618 if txmatch(_reportobsoletedsource):
1619 @reportsummary
1619 @reportsummary
1620 def reportobsoleted(repo, tr):
1620 def reportobsoleted(repo, tr):
1621 obsoleted = obsutil.getobsoleted(repo, tr)
1621 obsoleted = obsutil.getobsoleted(repo, tr)
1622 if obsoleted:
1622 if obsoleted:
1623 repo.ui.status(_('obsoleted %i changesets\n')
1623 repo.ui.status(_('obsoleted %i changesets\n')
1624 % len(obsoleted))
1624 % len(obsoleted))
1625
1625
1626 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1626 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1627 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1627 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1628 instabilitytypes = [
1628 instabilitytypes = [
1629 ('orphan', 'orphan'),
1629 ('orphan', 'orphan'),
1630 ('phase-divergent', 'phasedivergent'),
1630 ('phase-divergent', 'phasedivergent'),
1631 ('content-divergent', 'contentdivergent'),
1631 ('content-divergent', 'contentdivergent'),
1632 ]
1632 ]
1633
1633
1634 def getinstabilitycounts(repo):
1634 def getinstabilitycounts(repo):
1635 filtered = repo.changelog.filteredrevs
1635 filtered = repo.changelog.filteredrevs
1636 counts = {}
1636 counts = {}
1637 for instability, revset in instabilitytypes:
1637 for instability, revset in instabilitytypes:
1638 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1638 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1639 filtered)
1639 filtered)
1640 return counts
1640 return counts
1641
1641
1642 oldinstabilitycounts = getinstabilitycounts(repo)
1642 oldinstabilitycounts = getinstabilitycounts(repo)
1643 @reportsummary
1643 @reportsummary
1644 def reportnewinstabilities(repo, tr):
1644 def reportnewinstabilities(repo, tr):
1645 newinstabilitycounts = getinstabilitycounts(repo)
1645 newinstabilitycounts = getinstabilitycounts(repo)
1646 for instability, revset in instabilitytypes:
1646 for instability, revset in instabilitytypes:
1647 delta = (newinstabilitycounts[instability] -
1647 delta = (newinstabilitycounts[instability] -
1648 oldinstabilitycounts[instability])
1648 oldinstabilitycounts[instability])
1649 msg = getinstabilitymessage(delta, instability)
1649 msg = getinstabilitymessage(delta, instability)
1650 if msg:
1650 if msg:
1651 repo.ui.warn(msg)
1651 repo.ui.warn(msg)
1652
1652
1653 if txmatch(_reportnewcssource):
1653 if txmatch(_reportnewcssource):
1654 @reportsummary
1654 @reportsummary
1655 def reportnewcs(repo, tr):
1655 def reportnewcs(repo, tr):
1656 """Report the range of new revisions pulled/unbundled."""
1656 """Report the range of new revisions pulled/unbundled."""
1657 origrepolen = tr.changes.get('origrepolen', len(repo))
1657 origrepolen = tr.changes.get('origrepolen', len(repo))
1658 unfi = repo.unfiltered()
1658 unfi = repo.unfiltered()
1659 if origrepolen >= len(unfi):
1659 if origrepolen >= len(unfi):
1660 return
1660 return
1661
1661
1662 # Compute the bounds of new visible revisions' range.
1662 # Compute the bounds of new visible revisions' range.
1663 revs = smartset.spanset(repo, start=origrepolen)
1663 revs = smartset.spanset(repo, start=origrepolen)
1664 if revs:
1664 if revs:
1665 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1665 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1666
1666
1667 if minrev == maxrev:
1667 if minrev == maxrev:
1668 revrange = minrev
1668 revrange = minrev
1669 else:
1669 else:
1670 revrange = '%s:%s' % (minrev, maxrev)
1670 revrange = '%s:%s' % (minrev, maxrev)
1671 draft = len(repo.revs('%ld and draft()', revs))
1671 draft = len(repo.revs('%ld and draft()', revs))
1672 secret = len(repo.revs('%ld and secret()', revs))
1672 secret = len(repo.revs('%ld and secret()', revs))
1673 if not (draft or secret):
1673 if not (draft or secret):
1674 msg = _('new changesets %s\n') % revrange
1674 msg = _('new changesets %s\n') % revrange
1675 elif draft and secret:
1675 elif draft and secret:
1676 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1676 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1677 msg %= (revrange, draft, secret)
1677 msg %= (revrange, draft, secret)
1678 elif draft:
1678 elif draft:
1679 msg = _('new changesets %s (%d drafts)\n')
1679 msg = _('new changesets %s (%d drafts)\n')
1680 msg %= (revrange, draft)
1680 msg %= (revrange, draft)
1681 elif secret:
1681 elif secret:
1682 msg = _('new changesets %s (%d secrets)\n')
1682 msg = _('new changesets %s (%d secrets)\n')
1683 msg %= (revrange, secret)
1683 msg %= (revrange, secret)
1684 else:
1684 else:
1685 errormsg = 'entered unreachable condition'
1685 errormsg = 'entered unreachable condition'
1686 raise error.ProgrammingError(errormsg)
1686 raise error.ProgrammingError(errormsg)
1687 repo.ui.status(msg)
1687 repo.ui.status(msg)
1688
1688
1689 # search new changesets directly pulled as obsolete
1689 # search new changesets directly pulled as obsolete
1690 duplicates = tr.changes.get('revduplicates', ())
1690 duplicates = tr.changes.get('revduplicates', ())
1691 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1691 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1692 origrepolen, duplicates)
1692 origrepolen, duplicates)
1693 cl = repo.changelog
1693 cl = repo.changelog
1694 extinctadded = [r for r in obsadded if r not in cl]
1694 extinctadded = [r for r in obsadded if r not in cl]
1695 if extinctadded:
1695 if extinctadded:
1696 # They are not just obsolete, but obsolete and invisible
1696 # They are not just obsolete, but obsolete and invisible
1697 # we call them "extinct" internally but the terms have not been
1697 # we call them "extinct" internally but the terms have not been
1698 # exposed to users.
1698 # exposed to users.
1699 msg = '(%d other changesets obsolete on arrival)\n'
1699 msg = '(%d other changesets obsolete on arrival)\n'
1700 repo.ui.status(msg % len(extinctadded))
1700 repo.ui.status(msg % len(extinctadded))
1701
1701
1702 @reportsummary
1702 @reportsummary
1703 def reportphasechanges(repo, tr):
1703 def reportphasechanges(repo, tr):
1704 """Report statistics of phase changes for changesets pre-existing
1704 """Report statistics of phase changes for changesets pre-existing
1705 pull/unbundle.
1705 pull/unbundle.
1706 """
1706 """
1707 origrepolen = tr.changes.get('origrepolen', len(repo))
1707 origrepolen = tr.changes.get('origrepolen', len(repo))
1708 phasetracking = tr.changes.get('phases', {})
1708 phasetracking = tr.changes.get('phases', {})
1709 if not phasetracking:
1709 if not phasetracking:
1710 return
1710 return
1711 published = [
1711 published = [
1712 rev for rev, (old, new) in phasetracking.iteritems()
1712 rev for rev, (old, new) in phasetracking.iteritems()
1713 if new == phases.public and rev < origrepolen
1713 if new == phases.public and rev < origrepolen
1714 ]
1714 ]
1715 if not published:
1715 if not published:
1716 return
1716 return
1717 repo.ui.status(_('%d local changesets published\n')
1717 repo.ui.status(_('%d local changesets published\n')
1718 % len(published))
1718 % len(published))
1719
1719
1720 def getinstabilitymessage(delta, instability):
1720 def getinstabilitymessage(delta, instability):
1721 """function to return the message to show warning about new instabilities
1721 """function to return the message to show warning about new instabilities
1722
1722
1723 exists as a separate function so that extension can wrap to show more
1723 exists as a separate function so that extension can wrap to show more
1724 information like how to fix instabilities"""
1724 information like how to fix instabilities"""
1725 if delta > 0:
1725 if delta > 0:
1726 return _('%i new %s changesets\n') % (delta, instability)
1726 return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
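
# Example (illustrative, with made-up short hashes): six nodes with the
# default maxnumnodes=4 and a non-verbose ui are summarized as
#
#   '1f0dee641bb7 2dc09a01254d 4538525df7e2 e3d49c0575d8 and 2 others'
#
# while a verbose ui (or four nodes or fewer) prints every hash.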

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches only
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
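
# Illustrative failure mode (hypothetical hashes): if branch 'default'
# ends up with two visible heads, the transaction is aborted with
#
#   abort: rejecting multiple heads on branch "default"
#   (2 heads: 6a3ae77c23dc 1de2746ddb4a)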

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink
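
# Sketch (hypothetical extension code, not part of this module): since
# wrapconvertsink() is a deliberate no-op hook, an extension can replace
# it via extensions.wrapfunction() to decorate every convert sink:
#
#   from mercurial import extensions, scmutil
#
#   def _wrapsink(orig, sink):
#       sink = orig(sink)
#       # ... instrument or wrap the sink object here ...
#       return sink
#
#   def extsetup(ui):
#       extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)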

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until
    # we can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
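
# Illustrative usage (values are hypothetical): with the directaccess
# feature enabled in the configuration,
#
#   [experimental]
#   directaccess = True
#
# a command passing specs=[b'ff0000'] for a hidden changeset gets back a
# repo filtered as 'visible-hidden' in which that changeset is accessible;
# with hiddentype='warn' the access is also reported via ui.warn() above.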

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of the
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                        continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
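
# Illustrative walk-through (hypothetical symbols): with
# directaccess.revnums enabled, '12' is first tried as a revision number
# and added if it is hidden (present in the unfiltered changelog but not
# in the filtered one); a symbol like 'ff0000' fails int() and falls
# through to the hex-node-prefix lookup that follows.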

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
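
# Illustrative reading of the revset above (hypothetical graph): it keeps
# the ancestors of the bookmark while discarding history that also leads
# to other heads or to other bookmarks. For
#
#   0 -- 1 -- 2  (bookmark: feature)
#         \
#          3  (bookmark: other)
#
# bookmarkrevs(repo, 'feature') selects only rev 2: revs 0 and 1 are also
# ancestors of the 'other' bookmark and are therefore excluded.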