extdata: avoid crashing inside subprocess when we get a revset parse error...
Augie Fackler
r42776:ea6558db default
@@ -1,1981 +1,1986 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import posixpath
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    copies as copiesmod,
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code

    return -1

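# Illustrative sketch (not part of this module): callcatch() expects a
# zero-argument callable and hands back either its return value or an exit
# code when one of the exceptions handled above is caught. The names `ui`,
# `repo` and `runcommand` below are assumed placeholders, not real APIs from
# this file.
#
#     ret = callcatch(ui, lambda: runcommand(ui, repo))
#     # ret is runcommand()'s result on success, or e.g. -1 after an
#     # error.Abort was reported to the user.
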
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

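# Illustrative sketch (assumes an already-opened `repo` object; not part of
# this file): revsymbol() resolves exactly one symbol, given as bytes.
#
#     if isrevsymbol(repo, b'tip'):
#         ctx = revsymbol(repo, b'tip')      # '.', 'tip', revnums, hashes,
#                                            # bookmarks, tags, ...
#     # A full revset such as b'max(public())' is not a single symbol and
#     # would raise; use revrange()/revsingle() below for expressions.
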
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

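# Illustrative sketch (assumes an already-opened `repo` object; not part of
# this file): per the docstring above, arguments are expanded with
# revsetlang.formatspec() first, and revrange() returns the union of all
# given revsets as a smartset.
#
#     spec = revsetlang.formatspec('%d::%d', 0, 5)   # pre-format arguments
#     revs = revrange(repo, [spec, 'draft()'])       # union of both revsets
#     if revs:
#         first, last = revs.first(), revs.last()
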
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    elif repo.ui.configbool('ui', 'slash'):
        return lambda f: f
    else:
        return util.localpath

def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))

def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    return bool(pats or opts.get('include') or opts.get('exclude'))

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)

891 class _containsnode(object):
891 class _containsnode(object):
892 """proxy __contains__(node) to container.__contains__ which accepts revs"""
892 """proxy __contains__(node) to container.__contains__ which accepts revs"""
893
893
894 def __init__(self, repo, revcontainer):
894 def __init__(self, repo, revcontainer):
895 self._torev = repo.changelog.rev
895 self._torev = repo.changelog.rev
896 self._revcontains = revcontainer.__contains__
896 self._revcontains = revcontainer.__contains__
897
897
898 def __contains__(self, node):
898 def __contains__(self, node):
899 return self._revcontains(self._torev(node))
899 return self._revcontains(self._torev(node))
900
900
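# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# What the _containsnode proxy above buys us: a container that only knows
# about revision numbers can answer node-membership questions once we supply
# a node -> rev translation. The mapping and node names below are made up.
class sketchcontainsnode(object):
    def __init__(self, node_to_rev, revcontainer):
        self._torev = node_to_rev.__getitem__
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

nodemap = {'aaaa': 0, 'bbbb': 1, 'cccc': 2}
wanted = sketchcontainsnode(nodemap, {1, 2})
assert 'bbbb' in wanted and 'aaaa' not in wanted
# ------------------------------------------------------------------------------
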
901 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
901 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
902 fixphase=False, targetphase=None, backup=True):
902 fixphase=False, targetphase=None, backup=True):
903 """do common cleanups when old nodes are replaced by new nodes
903 """do common cleanups when old nodes are replaced by new nodes
904
904
905 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
905 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
906 (we might also want to move working directory parent in the future)
906 (we might also want to move working directory parent in the future)
907
907
908 By default, bookmark moves are calculated automatically from 'replacements',
908 By default, bookmark moves are calculated automatically from 'replacements',
909 but 'moves' can be used to override that. Also, 'moves' may include
909 but 'moves' can be used to override that. Also, 'moves' may include
910 additional bookmark moves that should not have associated obsmarkers.
910 additional bookmark moves that should not have associated obsmarkers.
911
911
912 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
912 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
913 have replacements. operation is a string, like "rebase".
913 have replacements. operation is a string, like "rebase".
914
914
915 metadata is a dictionary containing metadata to be stored in obsmarkers if
915 metadata is a dictionary containing metadata to be stored in obsmarkers if
916 obsolescence is enabled.
916 obsolescence is enabled.
917 """
917 """
918 assert fixphase or targetphase is None
918 assert fixphase or targetphase is None
919 if not replacements and not moves:
919 if not replacements and not moves:
920 return
920 return
921
921
922 # translate mapping's other forms
922 # translate mapping's other forms
923 if not util.safehasattr(replacements, 'items'):
923 if not util.safehasattr(replacements, 'items'):
924 replacements = {(n,): () for n in replacements}
924 replacements = {(n,): () for n in replacements}
925 else:
925 else:
926 # upgrading non tuple "source" to tuple ones for BC
926 # upgrading non tuple "source" to tuple ones for BC
927 repls = {}
927 repls = {}
928 for key, value in replacements.items():
928 for key, value in replacements.items():
929 if not isinstance(key, tuple):
929 if not isinstance(key, tuple):
930 key = (key,)
930 key = (key,)
931 repls[key] = value
931 repls[key] = value
932 replacements = repls
932 replacements = repls
933
933
934 # Unfiltered repo is needed since nodes in replacements might be hidden.
934 # Unfiltered repo is needed since nodes in replacements might be hidden.
935 unfi = repo.unfiltered()
935 unfi = repo.unfiltered()
936
936
937 # Calculate bookmark movements
937 # Calculate bookmark movements
938 if moves is None:
938 if moves is None:
939 moves = {}
939 moves = {}
940 for oldnodes, newnodes in replacements.items():
940 for oldnodes, newnodes in replacements.items():
941 for oldnode in oldnodes:
941 for oldnode in oldnodes:
942 if oldnode in moves:
942 if oldnode in moves:
943 continue
943 continue
944 if len(newnodes) > 1:
944 if len(newnodes) > 1:
945 # usually a split, take the one with biggest rev number
945 # usually a split, take the one with biggest rev number
946 newnode = next(unfi.set('max(%ln)', newnodes)).node()
946 newnode = next(unfi.set('max(%ln)', newnodes)).node()
947 elif len(newnodes) == 0:
947 elif len(newnodes) == 0:
948 # move bookmark backwards
948 # move bookmark backwards
949 allreplaced = []
949 allreplaced = []
950 for rep in replacements:
950 for rep in replacements:
951 allreplaced.extend(rep)
951 allreplaced.extend(rep)
952 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
952 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
953 allreplaced))
953 allreplaced))
954 if roots:
954 if roots:
955 newnode = roots[0].node()
955 newnode = roots[0].node()
956 else:
956 else:
957 newnode = nullid
957 newnode = nullid
958 else:
958 else:
959 newnode = newnodes[0]
959 newnode = newnodes[0]
960 moves[oldnode] = newnode
960 moves[oldnode] = newnode
961
961
962 allnewnodes = [n for ns in replacements.values() for n in ns]
962 allnewnodes = [n for ns in replacements.values() for n in ns]
963 toretract = {}
963 toretract = {}
964 toadvance = {}
964 toadvance = {}
965 if fixphase:
965 if fixphase:
966 precursors = {}
966 precursors = {}
967 for oldnodes, newnodes in replacements.items():
967 for oldnodes, newnodes in replacements.items():
968 for oldnode in oldnodes:
968 for oldnode in oldnodes:
969 for newnode in newnodes:
969 for newnode in newnodes:
970 precursors.setdefault(newnode, []).append(oldnode)
970 precursors.setdefault(newnode, []).append(oldnode)
971
971
972 allnewnodes.sort(key=lambda n: unfi[n].rev())
972 allnewnodes.sort(key=lambda n: unfi[n].rev())
973 newphases = {}
973 newphases = {}
974 def phase(ctx):
974 def phase(ctx):
975 return newphases.get(ctx.node(), ctx.phase())
975 return newphases.get(ctx.node(), ctx.phase())
976 for newnode in allnewnodes:
976 for newnode in allnewnodes:
977 ctx = unfi[newnode]
977 ctx = unfi[newnode]
978 parentphase = max(phase(p) for p in ctx.parents())
978 parentphase = max(phase(p) for p in ctx.parents())
979 if targetphase is None:
979 if targetphase is None:
980 oldphase = max(unfi[oldnode].phase()
980 oldphase = max(unfi[oldnode].phase()
981 for oldnode in precursors[newnode])
981 for oldnode in precursors[newnode])
982 newphase = max(oldphase, parentphase)
982 newphase = max(oldphase, parentphase)
983 else:
983 else:
984 newphase = max(targetphase, parentphase)
984 newphase = max(targetphase, parentphase)
985 newphases[newnode] = newphase
985 newphases[newnode] = newphase
986 if newphase > ctx.phase():
986 if newphase > ctx.phase():
987 toretract.setdefault(newphase, []).append(newnode)
987 toretract.setdefault(newphase, []).append(newnode)
988 elif newphase < ctx.phase():
988 elif newphase < ctx.phase():
989 toadvance.setdefault(newphase, []).append(newnode)
989 toadvance.setdefault(newphase, []).append(newnode)
990
990
991 with repo.transaction('cleanup') as tr:
991 with repo.transaction('cleanup') as tr:
992 # Move bookmarks
992 # Move bookmarks
993 bmarks = repo._bookmarks
993 bmarks = repo._bookmarks
994 bmarkchanges = []
994 bmarkchanges = []
995 for oldnode, newnode in moves.items():
995 for oldnode, newnode in moves.items():
996 oldbmarks = repo.nodebookmarks(oldnode)
996 oldbmarks = repo.nodebookmarks(oldnode)
997 if not oldbmarks:
997 if not oldbmarks:
998 continue
998 continue
999 from . import bookmarks # avoid import cycle
999 from . import bookmarks # avoid import cycle
1000 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1000 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1001 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1001 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1002 hex(oldnode), hex(newnode)))
1002 hex(oldnode), hex(newnode)))
1003 # Delete divergent bookmarks being parents of related newnodes
1003 # Delete divergent bookmarks being parents of related newnodes
1004 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1004 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1005 allnewnodes, newnode, oldnode)
1005 allnewnodes, newnode, oldnode)
1006 deletenodes = _containsnode(repo, deleterevs)
1006 deletenodes = _containsnode(repo, deleterevs)
1007 for name in oldbmarks:
1007 for name in oldbmarks:
1008 bmarkchanges.append((name, newnode))
1008 bmarkchanges.append((name, newnode))
1009 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1009 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1010 bmarkchanges.append((b, None))
1010 bmarkchanges.append((b, None))
1011
1011
1012 if bmarkchanges:
1012 if bmarkchanges:
1013 bmarks.applychanges(repo, tr, bmarkchanges)
1013 bmarks.applychanges(repo, tr, bmarkchanges)
1014
1014
1015 for phase, nodes in toretract.items():
1015 for phase, nodes in toretract.items():
1016 phases.retractboundary(repo, tr, phase, nodes)
1016 phases.retractboundary(repo, tr, phase, nodes)
1017 for phase, nodes in toadvance.items():
1017 for phase, nodes in toadvance.items():
1018 phases.advanceboundary(repo, tr, phase, nodes)
1018 phases.advanceboundary(repo, tr, phase, nodes)
1019
1019
1020 mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
1020 mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
1021 # Obsolete or strip nodes
1021 # Obsolete or strip nodes
1022 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1022 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1023 # If a node is already obsoleted, and we want to obsolete it
1023 # If a node is already obsoleted, and we want to obsolete it
1024 # without a successor, skip that obsolete request since it's
1024 # without a successor, skip that obsolete request since it's
1025 # unnecessary. That's the "if s or not isobs(n)" check below.
1025 # unnecessary. That's the "if s or not isobs(n)" check below.
1026 # Also sort the nodes in topological order; that might be useful for
1026 # Also sort the nodes in topological order; that might be useful for
1027 # some obsstore logic.
1027 # some obsstore logic.
1028 # NOTE: the sorting might belong to createmarkers.
1028 # NOTE: the sorting might belong to createmarkers.
1029 torev = unfi.changelog.rev
1029 torev = unfi.changelog.rev
1030 sortfunc = lambda ns: torev(ns[0][0])
1030 sortfunc = lambda ns: torev(ns[0][0])
1031 rels = []
1031 rels = []
1032 for ns, s in sorted(replacements.items(), key=sortfunc):
1032 for ns, s in sorted(replacements.items(), key=sortfunc):
1033 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1033 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1034 rels.append(rel)
1034 rels.append(rel)
1035 if rels:
1035 if rels:
1036 obsolete.createmarkers(repo, rels, operation=operation,
1036 obsolete.createmarkers(repo, rels, operation=operation,
1037 metadata=metadata)
1037 metadata=metadata)
1038 elif phases.supportinternal(repo) and mayusearchived:
1038 elif phases.supportinternal(repo) and mayusearchived:
1039 # this assumes we do not have "unstable" nodes above the cleaned ones
1039 # this assumes we do not have "unstable" nodes above the cleaned ones
1040 allreplaced = set()
1040 allreplaced = set()
1041 for ns in replacements.keys():
1041 for ns in replacements.keys():
1042 allreplaced.update(ns)
1042 allreplaced.update(ns)
1043 if backup:
1043 if backup:
1044 from . import repair # avoid import cycle
1044 from . import repair # avoid import cycle
1045 node = min(allreplaced, key=repo.changelog.rev)
1045 node = min(allreplaced, key=repo.changelog.rev)
1046 repair.backupbundle(repo, allreplaced, allreplaced, node,
1046 repair.backupbundle(repo, allreplaced, allreplaced, node,
1047 operation)
1047 operation)
1048 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1048 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1049 else:
1049 else:
1050 from . import repair # avoid import cycle
1050 from . import repair # avoid import cycle
1051 tostrip = list(n for ns in replacements for n in ns)
1051 tostrip = list(n for ns in replacements for n in ns)
1052 if tostrip:
1052 if tostrip:
1053 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1053 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1054 backup=backup)
1054 backup=backup)
1055
1055
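# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# How cleanupnodes() above normalizes its 'replacements' argument (the
# "translate mapping's other forms" block): a bare iterable means "these nodes
# have no successors", and plain-node keys are upgraded to 1-tuples for
# backwards compatibility. The node strings are stand-ins for binary nodeids.
def sketch_normalize_replacements(replacements):
    if not hasattr(replacements, 'items'):
        return {(n,): () for n in replacements}
    return {(k if isinstance(k, tuple) else (k,)): v
            for k, v in replacements.items()}

assert sketch_normalize_replacements(['n1', 'n2']) == {('n1',): (), ('n2',): ()}
assert sketch_normalize_replacements({'old': ['new']}) == {('old',): ['new']}
assert sketch_normalize_replacements({('o1', 'o2'): ['n']}) == {('o1', 'o2'): ['n']}
# ------------------------------------------------------------------------------
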
1056 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1056 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1057 if opts is None:
1057 if opts is None:
1058 opts = {}
1058 opts = {}
1059 m = matcher
1059 m = matcher
1060 dry_run = opts.get('dry_run')
1060 dry_run = opts.get('dry_run')
1061 try:
1061 try:
1062 similarity = float(opts.get('similarity') or 0)
1062 similarity = float(opts.get('similarity') or 0)
1063 except ValueError:
1063 except ValueError:
1064 raise error.Abort(_('similarity must be a number'))
1064 raise error.Abort(_('similarity must be a number'))
1065 if similarity < 0 or similarity > 100:
1065 if similarity < 0 or similarity > 100:
1066 raise error.Abort(_('similarity must be between 0 and 100'))
1066 raise error.Abort(_('similarity must be between 0 and 100'))
1067 similarity /= 100.0
1067 similarity /= 100.0
1068
1068
1069 ret = 0
1069 ret = 0
1070
1070
1071 wctx = repo[None]
1071 wctx = repo[None]
1072 for subpath in sorted(wctx.substate):
1072 for subpath in sorted(wctx.substate):
1073 submatch = matchmod.subdirmatcher(subpath, m)
1073 submatch = matchmod.subdirmatcher(subpath, m)
1074 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1074 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1075 sub = wctx.sub(subpath)
1075 sub = wctx.sub(subpath)
1076 subprefix = repo.wvfs.reljoin(prefix, subpath)
1076 subprefix = repo.wvfs.reljoin(prefix, subpath)
1077 subuipathfn = subdiruipathfn(subpath, uipathfn)
1077 subuipathfn = subdiruipathfn(subpath, uipathfn)
1078 try:
1078 try:
1079 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1079 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1080 ret = 1
1080 ret = 1
1081 except error.LookupError:
1081 except error.LookupError:
1082 repo.ui.status(_("skipping missing subrepository: %s\n")
1082 repo.ui.status(_("skipping missing subrepository: %s\n")
1083 % uipathfn(subpath))
1083 % uipathfn(subpath))
1084
1084
1085 rejected = []
1085 rejected = []
1086 def badfn(f, msg):
1086 def badfn(f, msg):
1087 if f in m.files():
1087 if f in m.files():
1088 m.bad(f, msg)
1088 m.bad(f, msg)
1089 rejected.append(f)
1089 rejected.append(f)
1090
1090
1091 badmatch = matchmod.badmatch(m, badfn)
1091 badmatch = matchmod.badmatch(m, badfn)
1092 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1092 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1093 badmatch)
1093 badmatch)
1094
1094
1095 unknownset = set(unknown + forgotten)
1095 unknownset = set(unknown + forgotten)
1096 toprint = unknownset.copy()
1096 toprint = unknownset.copy()
1097 toprint.update(deleted)
1097 toprint.update(deleted)
1098 for abs in sorted(toprint):
1098 for abs in sorted(toprint):
1099 if repo.ui.verbose or not m.exact(abs):
1099 if repo.ui.verbose or not m.exact(abs):
1100 if abs in unknownset:
1100 if abs in unknownset:
1101 status = _('adding %s\n') % uipathfn(abs)
1101 status = _('adding %s\n') % uipathfn(abs)
1102 label = 'ui.addremove.added'
1102 label = 'ui.addremove.added'
1103 else:
1103 else:
1104 status = _('removing %s\n') % uipathfn(abs)
1104 status = _('removing %s\n') % uipathfn(abs)
1105 label = 'ui.addremove.removed'
1105 label = 'ui.addremove.removed'
1106 repo.ui.status(status, label=label)
1106 repo.ui.status(status, label=label)
1107
1107
1108 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1108 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1109 similarity, uipathfn)
1109 similarity, uipathfn)
1110
1110
1111 if not dry_run:
1111 if not dry_run:
1112 _markchanges(repo, unknown + forgotten, deleted, renames)
1112 _markchanges(repo, unknown + forgotten, deleted, renames)
1113
1113
1114 for f in rejected:
1114 for f in rejected:
1115 if f in m.files():
1115 if f in m.files():
1116 return 1
1116 return 1
1117 return ret
1117 return ret
1118
1118
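# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# The --similarity handling at the top of addremove() above, pulled out as a
# standalone helper: accept a percentage (possibly None or empty), validate
# the 0-100 range, and return a 0.0-1.0 ratio for the rename detector.
def sketch_parse_similarity(value):
    try:
        similarity = float(value or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if similarity < 0 or similarity > 100:
        raise ValueError('similarity must be between 0 and 100')
    return similarity / 100.0

assert sketch_parse_similarity('75') == 0.75
assert sketch_parse_similarity(None) == 0.0
# ------------------------------------------------------------------------------
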
1119 def marktouched(repo, files, similarity=0.0):
1119 def marktouched(repo, files, similarity=0.0):
1120 '''Assert that files have somehow been operated upon. files are relative to
1120 '''Assert that files have somehow been operated upon. files are relative to
1121 the repo root.'''
1121 the repo root.'''
1122 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1122 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1123 rejected = []
1123 rejected = []
1124
1124
1125 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1125 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1126
1126
1127 if repo.ui.verbose:
1127 if repo.ui.verbose:
1128 unknownset = set(unknown + forgotten)
1128 unknownset = set(unknown + forgotten)
1129 toprint = unknownset.copy()
1129 toprint = unknownset.copy()
1130 toprint.update(deleted)
1130 toprint.update(deleted)
1131 for abs in sorted(toprint):
1131 for abs in sorted(toprint):
1132 if abs in unknownset:
1132 if abs in unknownset:
1133 status = _('adding %s\n') % abs
1133 status = _('adding %s\n') % abs
1134 else:
1134 else:
1135 status = _('removing %s\n') % abs
1135 status = _('removing %s\n') % abs
1136 repo.ui.status(status)
1136 repo.ui.status(status)
1137
1137
1138 # TODO: We should probably have the caller pass in uipathfn and apply it to
1138 # TODO: We should probably have the caller pass in uipathfn and apply it to
1139 # the messages above too. legacyrelativevalue=True is consistent with how
1139 # the messages above too. legacyrelativevalue=True is consistent with how
1140 # it used to work.
1140 # it used to work.
1141 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1141 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1142 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1142 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1143 similarity, uipathfn)
1143 similarity, uipathfn)
1144
1144
1145 _markchanges(repo, unknown + forgotten, deleted, renames)
1145 _markchanges(repo, unknown + forgotten, deleted, renames)
1146
1146
1147 for f in rejected:
1147 for f in rejected:
1148 if f in m.files():
1148 if f in m.files():
1149 return 1
1149 return 1
1150 return 0
1150 return 0
1151
1151
1152 def _interestingfiles(repo, matcher):
1152 def _interestingfiles(repo, matcher):
1153 '''Walk dirstate with matcher, looking for files that addremove would care
1153 '''Walk dirstate with matcher, looking for files that addremove would care
1154 about.
1154 about.
1155
1155
1156 This is different from dirstate.status because it doesn't care about
1156 This is different from dirstate.status because it doesn't care about
1157 whether files are modified or clean.'''
1157 whether files are modified or clean.'''
1158 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1158 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1159 audit_path = pathutil.pathauditor(repo.root, cached=True)
1159 audit_path = pathutil.pathauditor(repo.root, cached=True)
1160
1160
1161 ctx = repo[None]
1161 ctx = repo[None]
1162 dirstate = repo.dirstate
1162 dirstate = repo.dirstate
1163 matcher = repo.narrowmatch(matcher, includeexact=True)
1163 matcher = repo.narrowmatch(matcher, includeexact=True)
1164 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1164 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1165 unknown=True, ignored=False, full=False)
1165 unknown=True, ignored=False, full=False)
1166 for abs, st in walkresults.iteritems():
1166 for abs, st in walkresults.iteritems():
1167 dstate = dirstate[abs]
1167 dstate = dirstate[abs]
1168 if dstate == '?' and audit_path.check(abs):
1168 if dstate == '?' and audit_path.check(abs):
1169 unknown.append(abs)
1169 unknown.append(abs)
1170 elif dstate != 'r' and not st:
1170 elif dstate != 'r' and not st:
1171 deleted.append(abs)
1171 deleted.append(abs)
1172 elif dstate == 'r' and st:
1172 elif dstate == 'r' and st:
1173 forgotten.append(abs)
1173 forgotten.append(abs)
1174 # for finding renames
1174 # for finding renames
1175 elif dstate == 'r' and not st:
1175 elif dstate == 'r' and not st:
1176 removed.append(abs)
1176 removed.append(abs)
1177 elif dstate == 'a':
1177 elif dstate == 'a':
1178 added.append(abs)
1178 added.append(abs)
1179
1179
1180 return added, unknown, deleted, removed, forgotten
1180 return added, unknown, deleted, removed, forgotten
1181
1181
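# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# The classification _interestingfiles() above applies to each walked path,
# reduced to a pure function: 'dstate' is the dirstate code ('?', 'r', 'a',
# 'n', ...) and 'exists' says whether the path is still on disk. The real
# code additionally runs a path audit before reporting unknown files.
def sketch_bucket(dstate, exists):
    if dstate == '?' and exists:
        return 'unknown'
    elif dstate != 'r' and not exists:
        return 'deleted'
    elif dstate == 'r' and exists:
        return 'forgotten'     # useful for finding renames
    elif dstate == 'r' and not exists:
        return 'removed'
    elif dstate == 'a':
        return 'added'
    return None

assert sketch_bucket('?', True) == 'unknown'
assert sketch_bucket('n', False) == 'deleted'
assert sketch_bucket('r', True) == 'forgotten'
assert sketch_bucket('a', True) == 'added'
# ------------------------------------------------------------------------------
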
1182 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1182 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1183 '''Find renames from removed files to added ones.'''
1183 '''Find renames from removed files to added ones.'''
1184 renames = {}
1184 renames = {}
1185 if similarity > 0:
1185 if similarity > 0:
1186 for old, new, score in similar.findrenames(repo, added, removed,
1186 for old, new, score in similar.findrenames(repo, added, removed,
1187 similarity):
1187 similarity):
1188 if (repo.ui.verbose or not matcher.exact(old)
1188 if (repo.ui.verbose or not matcher.exact(old)
1189 or not matcher.exact(new)):
1189 or not matcher.exact(new)):
1190 repo.ui.status(_('recording removal of %s as rename to %s '
1190 repo.ui.status(_('recording removal of %s as rename to %s '
1191 '(%d%% similar)\n') %
1191 '(%d%% similar)\n') %
1192 (uipathfn(old), uipathfn(new),
1192 (uipathfn(old), uipathfn(new),
1193 score * 100))
1193 score * 100))
1194 renames[new] = old
1194 renames[new] = old
1195 return renames
1195 return renames
1196
1196
1197 def _markchanges(repo, unknown, deleted, renames):
1197 def _markchanges(repo, unknown, deleted, renames):
1198 '''Marks the files in unknown as added, the files in deleted as removed,
1198 '''Marks the files in unknown as added, the files in deleted as removed,
1199 and the files in renames as copied.'''
1199 and the files in renames as copied.'''
1200 wctx = repo[None]
1200 wctx = repo[None]
1201 with repo.wlock():
1201 with repo.wlock():
1202 wctx.forget(deleted)
1202 wctx.forget(deleted)
1203 wctx.add(unknown)
1203 wctx.add(unknown)
1204 for new, old in renames.iteritems():
1204 for new, old in renames.iteritems():
1205 wctx.copy(old, new)
1205 wctx.copy(old, new)
1206
1206
1207 def getrenamedfn(repo, endrev=None):
1207 def getrenamedfn(repo, endrev=None):
1208 if copiesmod.usechangesetcentricalgo(repo):
1208 if copiesmod.usechangesetcentricalgo(repo):
1209 def getrenamed(fn, rev):
1209 def getrenamed(fn, rev):
1210 ctx = repo[rev]
1210 ctx = repo[rev]
1211 p1copies = ctx.p1copies()
1211 p1copies = ctx.p1copies()
1212 if fn in p1copies:
1212 if fn in p1copies:
1213 return p1copies[fn]
1213 return p1copies[fn]
1214 p2copies = ctx.p2copies()
1214 p2copies = ctx.p2copies()
1215 if fn in p2copies:
1215 if fn in p2copies:
1216 return p2copies[fn]
1216 return p2copies[fn]
1217 return None
1217 return None
1218 return getrenamed
1218 return getrenamed
1219
1219
1220 rcache = {}
1220 rcache = {}
1221 if endrev is None:
1221 if endrev is None:
1222 endrev = len(repo)
1222 endrev = len(repo)
1223
1223
1224 def getrenamed(fn, rev):
1224 def getrenamed(fn, rev):
1225 '''looks up all renames for a file (up to endrev) the first
1225 '''looks up all renames for a file (up to endrev) the first
1226 time the file is given. It indexes on the changerev and only
1226 time the file is given. It indexes on the changerev and only
1227 parses the manifest if linkrev != changerev.
1227 parses the manifest if linkrev != changerev.
1228 Returns rename info for fn at changerev rev.'''
1228 Returns rename info for fn at changerev rev.'''
1229 if fn not in rcache:
1229 if fn not in rcache:
1230 rcache[fn] = {}
1230 rcache[fn] = {}
1231 fl = repo.file(fn)
1231 fl = repo.file(fn)
1232 for i in fl:
1232 for i in fl:
1233 lr = fl.linkrev(i)
1233 lr = fl.linkrev(i)
1234 renamed = fl.renamed(fl.node(i))
1234 renamed = fl.renamed(fl.node(i))
1235 rcache[fn][lr] = renamed and renamed[0]
1235 rcache[fn][lr] = renamed and renamed[0]
1236 if lr >= endrev:
1236 if lr >= endrev:
1237 break
1237 break
1238 if rev in rcache[fn]:
1238 if rev in rcache[fn]:
1239 return rcache[fn][rev]
1239 return rcache[fn][rev]
1240
1240
1241 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1241 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1242 # filectx logic.
1242 # filectx logic.
1243 try:
1243 try:
1244 return repo[rev][fn].copysource()
1244 return repo[rev][fn].copysource()
1245 except error.LookupError:
1245 except error.LookupError:
1246 return None
1246 return None
1247
1247
1248 return getrenamed
1248 return getrenamed
1249
1249
1250 def getcopiesfn(repo, endrev=None):
1250 def getcopiesfn(repo, endrev=None):
1251 if copiesmod.usechangesetcentricalgo(repo):
1251 if copiesmod.usechangesetcentricalgo(repo):
1252 def copiesfn(ctx):
1252 def copiesfn(ctx):
1253 if ctx.p2copies():
1253 if ctx.p2copies():
1254 allcopies = ctx.p1copies().copy()
1254 allcopies = ctx.p1copies().copy()
1255 # There should be no overlap
1255 # There should be no overlap
1256 allcopies.update(ctx.p2copies())
1256 allcopies.update(ctx.p2copies())
1257 return sorted(allcopies.items())
1257 return sorted(allcopies.items())
1258 else:
1258 else:
1259 return sorted(ctx.p1copies().items())
1259 return sorted(ctx.p1copies().items())
1260 else:
1260 else:
1261 getrenamed = getrenamedfn(repo, endrev)
1261 getrenamed = getrenamedfn(repo, endrev)
1262 def copiesfn(ctx):
1262 def copiesfn(ctx):
1263 copies = []
1263 copies = []
1264 for fn in ctx.files():
1264 for fn in ctx.files():
1265 rename = getrenamed(fn, ctx.rev())
1265 rename = getrenamed(fn, ctx.rev())
1266 if rename:
1266 if rename:
1267 copies.append((fn, rename))
1267 copies.append((fn, rename))
1268 return copies
1268 return copies
1269
1269
1270 return copiesfn
1270 return copiesfn
1271
1271
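# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# How copiesfn() above combines copy information from both parents under the
# changeset-centric algorithm: the two maps are expected not to overlap, so a
# plain merge followed by sorting is enough. File names are made up.
def sketch_merge_copies(p1copies, p2copies):
    if p2copies:
        allcopies = dict(p1copies)
        allcopies.update(p2copies)   # there should be no overlap
        return sorted(allcopies.items())
    return sorted(p1copies.items())

assert sketch_merge_copies({'b': 'a'}, {}) == [('b', 'a')]
assert sketch_merge_copies({'b': 'a'}, {'d': 'c'}) == [('b', 'a'), ('d', 'c')]
# ------------------------------------------------------------------------------
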
1272 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1272 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1273 """Update the dirstate to reflect the intent of copying src to dst. For
1273 """Update the dirstate to reflect the intent of copying src to dst. For
1274 various reasons, it might not end up with dst being marked as copied from src.
1274 various reasons, it might not end up with dst being marked as copied from src.
1275 """
1275 """
1276 origsrc = repo.dirstate.copied(src) or src
1276 origsrc = repo.dirstate.copied(src) or src
1277 if dst == origsrc: # copying back a copy?
1277 if dst == origsrc: # copying back a copy?
1278 if repo.dirstate[dst] not in 'mn' and not dryrun:
1278 if repo.dirstate[dst] not in 'mn' and not dryrun:
1279 repo.dirstate.normallookup(dst)
1279 repo.dirstate.normallookup(dst)
1280 else:
1280 else:
1281 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1281 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1282 if not ui.quiet:
1282 if not ui.quiet:
1283 ui.warn(_("%s has not been committed yet, so no copy "
1283 ui.warn(_("%s has not been committed yet, so no copy "
1284 "data will be stored for %s.\n")
1284 "data will be stored for %s.\n")
1285 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1285 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1286 if repo.dirstate[dst] in '?r' and not dryrun:
1286 if repo.dirstate[dst] in '?r' and not dryrun:
1287 wctx.add([dst])
1287 wctx.add([dst])
1288 elif not dryrun:
1288 elif not dryrun:
1289 wctx.copy(origsrc, dst)
1289 wctx.copy(origsrc, dst)
1290
1290
1291 def movedirstate(repo, newctx, match=None):
1291 def movedirstate(repo, newctx, match=None):
1292 """Move the dirstate to newctx and adjust it as necessary.
1292 """Move the dirstate to newctx and adjust it as necessary.
1293
1293
1294 A matcher can be provided as an optimization. It is probably a bug to pass
1294 A matcher can be provided as an optimization. It is probably a bug to pass
1295 a matcher that doesn't match all the differences between the parent of the
1295 a matcher that doesn't match all the differences between the parent of the
1296 working copy and newctx.
1296 working copy and newctx.
1297 """
1297 """
1298 oldctx = repo['.']
1298 oldctx = repo['.']
1299 ds = repo.dirstate
1299 ds = repo.dirstate
1300 ds.setparents(newctx.node(), nullid)
1300 ds.setparents(newctx.node(), nullid)
1301 copies = dict(ds.copies())
1301 copies = dict(ds.copies())
1302 s = newctx.status(oldctx, match=match)
1302 s = newctx.status(oldctx, match=match)
1303 for f in s.modified:
1303 for f in s.modified:
1304 if ds[f] == 'r':
1304 if ds[f] == 'r':
1305 # modified + removed -> removed
1305 # modified + removed -> removed
1306 continue
1306 continue
1307 ds.normallookup(f)
1307 ds.normallookup(f)
1308
1308
1309 for f in s.added:
1309 for f in s.added:
1310 if ds[f] == 'r':
1310 if ds[f] == 'r':
1311 # added + removed -> unknown
1311 # added + removed -> unknown
1312 ds.drop(f)
1312 ds.drop(f)
1313 elif ds[f] != 'a':
1313 elif ds[f] != 'a':
1314 ds.add(f)
1314 ds.add(f)
1315
1315
1316 for f in s.removed:
1316 for f in s.removed:
1317 if ds[f] == 'a':
1317 if ds[f] == 'a':
1318 # removed + added -> normal
1318 # removed + added -> normal
1319 ds.normallookup(f)
1319 ds.normallookup(f)
1320 elif ds[f] != 'r':
1320 elif ds[f] != 'r':
1321 ds.remove(f)
1321 ds.remove(f)
1322
1322
1323 # Merge old parent and old working dir copies
1323 # Merge old parent and old working dir copies
1324 oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
1324 oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
1325 oldcopies.update(copies)
1325 oldcopies.update(copies)
1326 copies = dict((dst, oldcopies.get(src, src))
1326 copies = dict((dst, oldcopies.get(src, src))
1327 for dst, src in oldcopies.iteritems())
1327 for dst, src in oldcopies.iteritems())
1328 # Adjust the dirstate copies
1328 # Adjust the dirstate copies
1329 for dst, src in copies.iteritems():
1329 for dst, src in copies.iteritems():
1330 if (src not in newctx or dst in newctx or ds[dst] != 'a'):
1330 if (src not in newctx or dst in newctx or ds[dst] != 'a'):
1331 src = None
1331 src = None
1332 ds.copy(src, dst)
1332 ds.copy(src, dst)
1333
1333
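# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# The copy-chaining step at the end of movedirstate() above: copies recorded
# between newctx and the old parent are merged with copies recorded in the
# working directory, and a source that is itself a copy destination is
# followed one step further back. File names are hypothetical.
def sketch_chain_copies(parentcopies, wdircopies):
    merged = dict(parentcopies)
    merged.update(wdircopies)
    return {dst: merged.get(src, src) for dst, src in merged.items()}

# 'b' was copied from 'a' between newctx and the old parent, and the working
# directory then copied 'b' to 'c', so 'c' ultimately traces back to 'a'.
assert sketch_chain_copies({'b': 'a'}, {'c': 'b'}) == {'b': 'a', 'c': 'a'}
# ------------------------------------------------------------------------------
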
1334 def writerequires(opener, requirements):
1334 def writerequires(opener, requirements):
1335 with opener('requires', 'w', atomictemp=True) as fp:
1335 with opener('requires', 'w', atomictemp=True) as fp:
1336 for r in sorted(requirements):
1336 for r in sorted(requirements):
1337 fp.write("%s\n" % r)
1337 fp.write("%s\n" % r)
1338
1338
1339 class filecachesubentry(object):
1339 class filecachesubentry(object):
1340 def __init__(self, path, stat):
1340 def __init__(self, path, stat):
1341 self.path = path
1341 self.path = path
1342 self.cachestat = None
1342 self.cachestat = None
1343 self._cacheable = None
1343 self._cacheable = None
1344
1344
1345 if stat:
1345 if stat:
1346 self.cachestat = filecachesubentry.stat(self.path)
1346 self.cachestat = filecachesubentry.stat(self.path)
1347
1347
1348 if self.cachestat:
1348 if self.cachestat:
1349 self._cacheable = self.cachestat.cacheable()
1349 self._cacheable = self.cachestat.cacheable()
1350 else:
1350 else:
1351 # None means we don't know yet
1351 # None means we don't know yet
1352 self._cacheable = None
1352 self._cacheable = None
1353
1353
1354 def refresh(self):
1354 def refresh(self):
1355 if self.cacheable():
1355 if self.cacheable():
1356 self.cachestat = filecachesubentry.stat(self.path)
1356 self.cachestat = filecachesubentry.stat(self.path)
1357
1357
1358 def cacheable(self):
1358 def cacheable(self):
1359 if self._cacheable is not None:
1359 if self._cacheable is not None:
1360 return self._cacheable
1360 return self._cacheable
1361
1361
1362 # we don't know yet, assume it is for now
1362 # we don't know yet, assume it is for now
1363 return True
1363 return True
1364
1364
1365 def changed(self):
1365 def changed(self):
1366 # no point in going further if we can't cache it
1366 # no point in going further if we can't cache it
1367 if not self.cacheable():
1367 if not self.cacheable():
1368 return True
1368 return True
1369
1369
1370 newstat = filecachesubentry.stat(self.path)
1370 newstat = filecachesubentry.stat(self.path)
1371
1371
1372 # we may not know if it's cacheable yet, check again now
1372 # we may not know if it's cacheable yet, check again now
1373 if newstat and self._cacheable is None:
1373 if newstat and self._cacheable is None:
1374 self._cacheable = newstat.cacheable()
1374 self._cacheable = newstat.cacheable()
1375
1375
1376 # check again
1376 # check again
1377 if not self._cacheable:
1377 if not self._cacheable:
1378 return True
1378 return True
1379
1379
1380 if self.cachestat != newstat:
1380 if self.cachestat != newstat:
1381 self.cachestat = newstat
1381 self.cachestat = newstat
1382 return True
1382 return True
1383 else:
1383 else:
1384 return False
1384 return False
1385
1385
1386 @staticmethod
1386 @staticmethod
1387 def stat(path):
1387 def stat(path):
1388 try:
1388 try:
1389 return util.cachestat(path)
1389 return util.cachestat(path)
1390 except OSError as e:
1390 except OSError as e:
1391 if e.errno != errno.ENOENT:
1391 if e.errno != errno.ENOENT:
1392 raise
1392 raise
1393
1393
1394 class filecacheentry(object):
1394 class filecacheentry(object):
1395 def __init__(self, paths, stat=True):
1395 def __init__(self, paths, stat=True):
1396 self._entries = []
1396 self._entries = []
1397 for path in paths:
1397 for path in paths:
1398 self._entries.append(filecachesubentry(path, stat))
1398 self._entries.append(filecachesubentry(path, stat))
1399
1399
1400 def changed(self):
1400 def changed(self):
1401 '''true if any entry has changed'''
1401 '''true if any entry has changed'''
1402 for entry in self._entries:
1402 for entry in self._entries:
1403 if entry.changed():
1403 if entry.changed():
1404 return True
1404 return True
1405 return False
1405 return False
1406
1406
1407 def refresh(self):
1407 def refresh(self):
1408 for entry in self._entries:
1408 for entry in self._entries:
1409 entry.refresh()
1409 entry.refresh()
1410
1410
1411 class filecache(object):
1411 class filecache(object):
1412 """A property like decorator that tracks files under .hg/ for updates.
1412 """A property like decorator that tracks files under .hg/ for updates.
1413
1413
1414 On first access, the files defined as arguments are stat()ed and the
1414 On first access, the files defined as arguments are stat()ed and the
1415 results cached. The decorated function is called. The results are stashed
1415 results cached. The decorated function is called. The results are stashed
1416 away in a ``_filecache`` dict on the object whose method is decorated.
1416 away in a ``_filecache`` dict on the object whose method is decorated.
1417
1417
1418 On subsequent access, the cached result is returned directly because it has
1418 On subsequent access, the cached result is returned directly because it has
1419 been stored in the instance dictionary.
1419 been stored in the instance dictionary.
1420
1420
1421 On external property set/delete operations, the caller must update the
1421 On external property set/delete operations, the caller must update the
1422 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1422 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1423 instead of directly setting <attr>.
1423 instead of directly setting <attr>.
1424
1424
1425 When using the property API, the cached data is always used if available.
1425 When using the property API, the cached data is always used if available.
1426 No stat() is performed to check if the file has changed.
1426 No stat() is performed to check if the file has changed.
1427
1427
1428 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1428 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1429 can populate an entry before the property's getter is called. In this case,
1429 can populate an entry before the property's getter is called. In this case,
1430 entries in ``_filecache`` will be used during property operations,
1430 entries in ``_filecache`` will be used during property operations,
1431 if available. If the underlying file changes, it is up to external callers
1431 if available. If the underlying file changes, it is up to external callers
1432 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1432 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1433 method result as well as possibly calling ``del obj._filecache[attr]`` to
1433 method result as well as possibly calling ``del obj._filecache[attr]`` to
1434 remove the ``filecacheentry``.
1434 remove the ``filecacheentry``.
1435 """
1435 """
1436
1436
1437 def __init__(self, *paths):
1437 def __init__(self, *paths):
1438 self.paths = paths
1438 self.paths = paths
1439
1439
1440 def join(self, obj, fname):
1440 def join(self, obj, fname):
1441 """Used to compute the runtime path of a cached file.
1441 """Used to compute the runtime path of a cached file.
1442
1442
1443 Users should subclass filecache and provide their own version of this
1443 Users should subclass filecache and provide their own version of this
1444 function to call the appropriate join function on 'obj' (an instance
1444 function to call the appropriate join function on 'obj' (an instance
1445 of the class whose member function was decorated).
1445 of the class whose member function was decorated).
1446 """
1446 """
1447 raise NotImplementedError
1447 raise NotImplementedError
1448
1448
1449 def __call__(self, func):
1449 def __call__(self, func):
1450 self.func = func
1450 self.func = func
1451 self.sname = func.__name__
1451 self.sname = func.__name__
1452 self.name = pycompat.sysbytes(self.sname)
1452 self.name = pycompat.sysbytes(self.sname)
1453 return self
1453 return self
1454
1454
1455 def __get__(self, obj, type=None):
1455 def __get__(self, obj, type=None):
1456 # if accessed on the class, return the descriptor itself.
1456 # if accessed on the class, return the descriptor itself.
1457 if obj is None:
1457 if obj is None:
1458 return self
1458 return self
1459
1459
1460 assert self.sname not in obj.__dict__
1460 assert self.sname not in obj.__dict__
1461
1461
1462 entry = obj._filecache.get(self.name)
1462 entry = obj._filecache.get(self.name)
1463
1463
1464 if entry:
1464 if entry:
1465 if entry.changed():
1465 if entry.changed():
1466 entry.obj = self.func(obj)
1466 entry.obj = self.func(obj)
1467 else:
1467 else:
1468 paths = [self.join(obj, path) for path in self.paths]
1468 paths = [self.join(obj, path) for path in self.paths]
1469
1469
1470 # We stat -before- creating the object so our cache doesn't lie if
1470 # We stat -before- creating the object so our cache doesn't lie if
1471 # a writer modified it between the time we read and stat
1471 # a writer modified it between the time we read and stat
1472 entry = filecacheentry(paths, True)
1472 entry = filecacheentry(paths, True)
1473 entry.obj = self.func(obj)
1473 entry.obj = self.func(obj)
1474
1474
1475 obj._filecache[self.name] = entry
1475 obj._filecache[self.name] = entry
1476
1476
1477 obj.__dict__[self.sname] = entry.obj
1477 obj.__dict__[self.sname] = entry.obj
1478 return entry.obj
1478 return entry.obj
1479
1479
1480 # don't implement __set__(), which would make __dict__ lookup as slow as
1480 # don't implement __set__(), which would make __dict__ lookup as slow as
1481 # function call.
1481 # function call.
1482
1482
1483 def set(self, obj, value):
1483 def set(self, obj, value):
1484 if self.name not in obj._filecache:
1484 if self.name not in obj._filecache:
1485 # we add an entry for the missing value because X in __dict__
1485 # we add an entry for the missing value because X in __dict__
1486 # implies X in _filecache
1486 # implies X in _filecache
1487 paths = [self.join(obj, path) for path in self.paths]
1487 paths = [self.join(obj, path) for path in self.paths]
1488 ce = filecacheentry(paths, False)
1488 ce = filecacheentry(paths, False)
1489 obj._filecache[self.name] = ce
1489 obj._filecache[self.name] = ce
1490 else:
1490 else:
1491 ce = obj._filecache[self.name]
1491 ce = obj._filecache[self.name]
1492
1492
1493 ce.obj = value # update cached copy
1493 ce.obj = value # update cached copy
1494 obj.__dict__[self.sname] = value # update copy returned by obj.x
1494 obj.__dict__[self.sname] = value # update copy returned by obj.x
1495
1495
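# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# The core trick behind the filecache decorator above, stripped of the stat()
# machinery: a non-data descriptor that computes the value once and stashes it
# in the instance __dict__, so later attribute lookups never re-enter the
# descriptor until the cached attribute is explicitly deleted.
class sketchcache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        if obj is None:
            return self
        value = self.func(obj)
        obj.__dict__[self.name] = value   # shadows the descriptor from now on
        return value

class sketchrepo(object):
    loads = 0

    @sketchcache
    def data(self):
        self.loads += 1
        return 'payload'

r = sketchrepo()
assert r.data == 'payload' and r.data == 'payload' and r.loads == 1
del r.__dict__['data']                    # invalidate, like 'delattr(obj, attr)'
assert r.data == 'payload' and r.loads == 2
# ------------------------------------------------------------------------------
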
1496 def extdatasource(repo, source):
1496 def extdatasource(repo, source):
1497 """Gather a map of rev -> value dict from the specified source
1497 """Gather a map of rev -> value dict from the specified source
1498
1498
1499 A source spec is treated as a URL, with a special case shell: type
1499 A source spec is treated as a URL, with a special case shell: type
1500 for parsing the output from a shell command.
1500 for parsing the output from a shell command.
1501
1501
1502 The data is parsed as a series of newline-separated records where
1502 The data is parsed as a series of newline-separated records where
1503 each record is a revision specifier optionally followed by a space
1503 each record is a revision specifier optionally followed by a space
1504 and a freeform string value. If the revision is known locally, it
1504 and a freeform string value. If the revision is known locally, it
1505 is converted to a rev, otherwise the record is skipped.
1505 is converted to a rev, otherwise the record is skipped.
1506
1506
1507 Note that both key and value are treated as UTF-8 and converted to
1507 Note that both key and value are treated as UTF-8 and converted to
1508 the local encoding. This allows uniformity between local and
1508 the local encoding. This allows uniformity between local and
1509 remote data sources.
1509 remote data sources.
1510 """
1510 """
1511
1511
1512 spec = repo.ui.config("extdata", source)
1512 spec = repo.ui.config("extdata", source)
1513 if not spec:
1513 if not spec:
1514 raise error.Abort(_("unknown extdata source '%s'") % source)
1514 raise error.Abort(_("unknown extdata source '%s'") % source)
1515
1515
1516 data = {}
1516 data = {}
1517 src = proc = None
1517 src = proc = None
1518 try:
1518 try:
1519 if spec.startswith("shell:"):
1519 if spec.startswith("shell:"):
1520 # external commands should be run relative to the repo root
1520 # external commands should be run relative to the repo root
1521 cmd = spec[6:]
1521 cmd = spec[6:]
1522 proc = subprocess.Popen(procutil.tonativestr(cmd),
1522 proc = subprocess.Popen(procutil.tonativestr(cmd),
1523 shell=True, bufsize=-1,
1523 shell=True, bufsize=-1,
1524 close_fds=procutil.closefds,
1524 close_fds=procutil.closefds,
1525 stdout=subprocess.PIPE,
1525 stdout=subprocess.PIPE,
1526 cwd=procutil.tonativestr(repo.root))
1526 cwd=procutil.tonativestr(repo.root))
1527 src = proc.stdout
1527 src = proc.stdout
1528 else:
1528 else:
1529 # treat as a URL or file
1529 # treat as a URL or file
1530 src = url.open(repo.ui, spec)
1530 src = url.open(repo.ui, spec)
1531 for l in src:
1531 for l in src:
1532 if " " in l:
1532 if " " in l:
1533 k, v = l.strip().split(" ", 1)
1533 k, v = l.strip().split(" ", 1)
1534 else:
1534 else:
1535 k, v = l.strip(), ""
1535 k, v = l.strip(), ""
1536
1536
1537 k = encoding.tolocal(k)
1537 k = encoding.tolocal(k)
1538 try:
1538 try:
1539 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1539 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1540 except (error.LookupError, error.RepoLookupError):
1540 except (error.LookupError, error.RepoLookupError):
1541 pass # we ignore data for nodes that don't exist locally
1541 pass # we ignore data for nodes that don't exist locally
1542 finally:
1542 finally:
1543 if proc:
1543 if proc:
1544 proc.communicate()
1544 try:
1545 proc.communicate()
1546 except ValueError:
1547 # This happens if we started iterating src and then
1548 # get a parse error on a line. It should be safe to ignore.
1549 pass
1545 if src:
1550 if src:
1546 src.close()
1551 src.close()
1547 if proc and proc.returncode != 0:
1552 if proc and proc.returncode != 0:
1548 raise error.Abort(_("extdata command '%s' failed: %s")
1553 raise error.Abort(_("extdata command '%s' failed: %s")
1549 % (cmd, procutil.explainexit(proc.returncode)))
1554 % (cmd, procutil.explainexit(proc.returncode)))
1550
1555
1551 return data
1556 return data
1552
1557
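# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# The record format extdatasource() above consumes: one "<revspec>[ <value>]"
# record per line, with unknown revisions silently skipped. A hypothetical
# source could be configured as, e.g.:
#   [extdata]
#   reviews = shell:cat .hg/review-data
# 'resolve' below stands in for revsingle(repo, k).rev(); the hashes and
# values are made up.
def sketch_parse_extdata(lines, resolve):
    data = {}
    for l in lines:
        if ' ' in l:
            k, v = l.strip().split(' ', 1)
        else:
            k, v = l.strip(), ''
        try:
            data[resolve(k)] = v
        except LookupError:
            pass  # ignore data for revisions that don't exist locally
    return data

known = {'deadbeef': 5, '.': 7}
lines = ['deadbeef reviewed by alice\n', '. work in progress\n', 'cafecafe gone\n']
assert sketch_parse_extdata(lines, lambda k: known[k]) == {
    5: 'reviewed by alice', 7: 'work in progress'}
# ------------------------------------------------------------------------------
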
1553 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1558 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1554 if lock is None:
1559 if lock is None:
1555 raise error.LockInheritanceContractViolation(
1560 raise error.LockInheritanceContractViolation(
1556 'lock can only be inherited while held')
1561 'lock can only be inherited while held')
1557 if environ is None:
1562 if environ is None:
1558 environ = {}
1563 environ = {}
1559 with lock.inherit() as locker:
1564 with lock.inherit() as locker:
1560 environ[envvar] = locker
1565 environ[envvar] = locker
1561 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1566 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1562
1567
1563 def wlocksub(repo, cmd, *args, **kwargs):
1568 def wlocksub(repo, cmd, *args, **kwargs):
1564 """run cmd as a subprocess that allows inheriting repo's wlock
1569 """run cmd as a subprocess that allows inheriting repo's wlock
1565
1570
1566 This can only be called while the wlock is held. This takes all the
1571 This can only be called while the wlock is held. This takes all the
1567 arguments that ui.system does, and returns the exit code of the
1572 arguments that ui.system does, and returns the exit code of the
1568 subprocess."""
1573 subprocess."""
1569 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1574 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1570 **kwargs)
1575 **kwargs)
1571
1576
1572 class progress(object):
1577 class progress(object):
1573 def __init__(self, ui, updatebar, topic, unit="", total=None):
1578 def __init__(self, ui, updatebar, topic, unit="", total=None):
1574 self.ui = ui
1579 self.ui = ui
1575 self.pos = 0
1580 self.pos = 0
1576 self.topic = topic
1581 self.topic = topic
1577 self.unit = unit
1582 self.unit = unit
1578 self.total = total
1583 self.total = total
1579 self.debug = ui.configbool('progress', 'debug')
1584 self.debug = ui.configbool('progress', 'debug')
1580 self._updatebar = updatebar
1585 self._updatebar = updatebar
1581
1586
1582 def __enter__(self):
1587 def __enter__(self):
1583 return self
1588 return self
1584
1589
1585 def __exit__(self, exc_type, exc_value, exc_tb):
1590 def __exit__(self, exc_type, exc_value, exc_tb):
1586 self.complete()
1591 self.complete()
1587
1592
1588 def update(self, pos, item="", total=None):
1593 def update(self, pos, item="", total=None):
1589 assert pos is not None
1594 assert pos is not None
1590 if total:
1595 if total:
1591 self.total = total
1596 self.total = total
1592 self.pos = pos
1597 self.pos = pos
1593 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1598 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1594 if self.debug:
1599 if self.debug:
1595 self._printdebug(item)
1600 self._printdebug(item)
1596
1601
1597 def increment(self, step=1, item="", total=None):
1602 def increment(self, step=1, item="", total=None):
1598 self.update(self.pos + step, item, total)
1603 self.update(self.pos + step, item, total)
1599
1604
1600 def complete(self):
1605 def complete(self):
1601 self.pos = None
1606 self.pos = None
1602 self.unit = ""
1607 self.unit = ""
1603 self.total = None
1608 self.total = None
1604 self._updatebar(self.topic, self.pos, "", self.unit, self.total)
1609 self._updatebar(self.topic, self.pos, "", self.unit, self.total)
1605
1610
1606 def _printdebug(self, item):
1611 def _printdebug(self, item):
1607 unit = ' ' + self.unit if self.unit else ''
1612 unit = ' ' + self.unit if self.unit else ''
1609 if item:
1614 if item:
1610 item = ' ' + item
1615 item = ' ' + item
1611
1616
1612 if self.total:
1617 if self.total:
1613 pct = 100.0 * self.pos / self.total
1618 pct = 100.0 * self.pos / self.total
1614 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1619 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1615 % (self.topic, item, self.pos, self.total, unit, pct))
1620 % (self.topic, item, self.pos, self.total, unit, pct))
1616 else:
1621 else:
1617 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1622 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1618
1623
1619 def gdinitconfig(ui):
1624 def gdinitconfig(ui):
1620 """helper function to know if a repo should be created as general delta
1625 """helper function to know if a repo should be created as general delta
1621 """
1626 """
1622 # experimental config: format.generaldelta
1627 # experimental config: format.generaldelta
1623 return (ui.configbool('format', 'generaldelta')
1628 return (ui.configbool('format', 'generaldelta')
1624 or ui.configbool('format', 'usegeneraldelta'))
1629 or ui.configbool('format', 'usegeneraldelta'))
1625
1630
1626 def gddeltaconfig(ui):
1631 def gddeltaconfig(ui):
1627 """helper function to know if incoming delta should be optimised
1632 """helper function to know if incoming delta should be optimised
1628 """
1633 """
1629 # experimental config: format.generaldelta
1634 # experimental config: format.generaldelta
1630 return ui.configbool('format', 'generaldelta')
1635 return ui.configbool('format', 'generaldelta')
1631
1636
1632 class simplekeyvaluefile(object):
1637 class simplekeyvaluefile(object):
1633 """A simple file with key=value lines
1638 """A simple file with key=value lines
1634
1639
1635 Keys must be alphanumeric and start with a letter; values must not
1640 Keys must be alphanumeric and start with a letter; values must not
1636 contain '\n' characters"""
1641 contain '\n' characters"""
1637 firstlinekey = '__firstline'
1642 firstlinekey = '__firstline'
1638
1643
1639 def __init__(self, vfs, path, keys=None):
1644 def __init__(self, vfs, path, keys=None):
1640 self.vfs = vfs
1645 self.vfs = vfs
1641 self.path = path
1646 self.path = path
1642
1647
1643 def read(self, firstlinenonkeyval=False):
1648 def read(self, firstlinenonkeyval=False):
1644 """Read the contents of a simple key-value file
1649 """Read the contents of a simple key-value file
1645
1650
1646 'firstlinenonkeyval' indicates whether the first line of the file should
1651 'firstlinenonkeyval' indicates whether the first line of the file should
1647 be treated as a key-value pair or returned fully under the
1652 be treated as a key-value pair or returned fully under the
1648 __firstline key."""
1653 __firstline key."""
1649 lines = self.vfs.readlines(self.path)
1654 lines = self.vfs.readlines(self.path)
1650 d = {}
1655 d = {}
1651 if firstlinenonkeyval:
1656 if firstlinenonkeyval:
1652 if not lines:
1657 if not lines:
1653 e = _("empty simplekeyvalue file")
1658 e = _("empty simplekeyvalue file")
1654 raise error.CorruptedState(e)
1659 raise error.CorruptedState(e)
1655 # we don't want to include '\n' in the __firstline
1660 # we don't want to include '\n' in the __firstline
1656 d[self.firstlinekey] = lines[0][:-1]
1661 d[self.firstlinekey] = lines[0][:-1]
1657 del lines[0]
1662 del lines[0]
1658
1663
1659 try:
1664 try:
1660 # the 'if line.strip()' part prevents us from failing on empty
1665 # the 'if line.strip()' part prevents us from failing on empty
1661 # lines which only contain '\n' and therefore are not skipped
1666 # lines which only contain '\n' and therefore are not skipped
1662 # by 'if line'
1667 # by 'if line'
1663 updatedict = dict(line[:-1].split('=', 1) for line in lines
1668 updatedict = dict(line[:-1].split('=', 1) for line in lines
1664 if line.strip())
1669 if line.strip())
1665 if self.firstlinekey in updatedict:
1670 if self.firstlinekey in updatedict:
1666 e = _("%r can't be used as a key")
1671 e = _("%r can't be used as a key")
1667 raise error.CorruptedState(e % self.firstlinekey)
1672 raise error.CorruptedState(e % self.firstlinekey)
1668 d.update(updatedict)
1673 d.update(updatedict)
1669 except ValueError as e:
1674 except ValueError as e:
1670 raise error.CorruptedState(str(e))
1675 raise error.CorruptedState(str(e))
1671 return d
1676 return d
1672
1677
1673 def write(self, data, firstline=None):
1678 def write(self, data, firstline=None):
1674 """Write key=>value mapping to a file
1679 """Write key=>value mapping to a file
1675 data is a dict. Keys must be alphanumerical and start with a letter.
1680 data is a dict. Keys must be alphanumerical and start with a letter.
1676 Values must not contain newline characters.
1681 Values must not contain newline characters.
1677
1682
1678 If 'firstline' is not None, it is written to file before
1683 If 'firstline' is not None, it is written to file before
1679 everything else, as it is, not in a key=value form"""
1684 everything else, as it is, not in a key=value form"""
1680 lines = []
1685 lines = []
1681 if firstline is not None:
1686 if firstline is not None:
1682 lines.append('%s\n' % firstline)
1687 lines.append('%s\n' % firstline)
1683
1688
1684 for k, v in data.items():
1689 for k, v in data.items():
1685 if k == self.firstlinekey:
1690 if k == self.firstlinekey:
1686 e = "key name '%s' is reserved" % self.firstlinekey
1691 e = "key name '%s' is reserved" % self.firstlinekey
1687 raise error.ProgrammingError(e)
1692 raise error.ProgrammingError(e)
1688 if not k[0:1].isalpha():
1693 if not k[0:1].isalpha():
1689 e = "keys must start with a letter in a key-value file"
1694 e = "keys must start with a letter in a key-value file"
1690 raise error.ProgrammingError(e)
1695 raise error.ProgrammingError(e)
1691 if not k.isalnum():
1696 if not k.isalnum():
1692 e = "invalid key name in a simple key-value file"
1697 e = "invalid key name in a simple key-value file"
1693 raise error.ProgrammingError(e)
1698 raise error.ProgrammingError(e)
1694 if '\n' in v:
1699 if '\n' in v:
1695 e = "invalid value in a simple key-value file"
1700 e = "invalid value in a simple key-value file"
1696 raise error.ProgrammingError(e)
1701 raise error.ProgrammingError(e)
1697 lines.append("%s=%s\n" % (k, v))
1702 lines.append("%s=%s\n" % (k, v))
1698 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1703 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1699 fp.write(''.join(lines))
1704 fp.write(''.join(lines))
1700
1705
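# --- editor's note: illustrative sketch, not part of scmutil.py --------------
# The on-disk shape produced by simplekeyvaluefile.write() above: an optional
# verbatim first line followed by one "key=value" line per entry, with the
# same key/value restrictions. The keys, values and first line are made up;
# the real method writes in dict order, this sketch sorts only to keep the
# output deterministic.
def sketch_serialize(data, firstline=None):
    lines = []
    if firstline is not None:
        lines.append('%s\n' % firstline)
    for k, v in sorted(data.items()):
        if not k[0:1].isalpha() or not k.isalnum() or '\n' in v:
            raise ValueError('invalid key or value')
        lines.append('%s=%s\n' % (k, v))
    return ''.join(lines)

assert (sketch_serialize({'state': 'done', 'nodecount': '3'}, firstline='v1')
        == 'v1\nnodecount=3\nstate=done\n')
# ------------------------------------------------------------------------------
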
1701 _reportobsoletedsource = [
1706 _reportobsoletedsource = [
1702 'debugobsolete',
1707 'debugobsolete',
1703 'pull',
1708 'pull',
1704 'push',
1709 'push',
1705 'serve',
1710 'serve',
1706 'unbundle',
1711 'unbundle',
1707 ]
1712 ]
1708
1713
1709 _reportnewcssource = [
1714 _reportnewcssource = [
1710 'pull',
1715 'pull',
1711 'unbundle',
1716 'unbundle',
1712 ]
1717 ]
1713
1718
1714 def prefetchfiles(repo, revs, match):
1719 def prefetchfiles(repo, revs, match):
1715 """Invokes the registered file prefetch functions, allowing extensions to
1720 """Invokes the registered file prefetch functions, allowing extensions to
1716 ensure the corresponding files are available locally, before the command
1721 ensure the corresponding files are available locally, before the command
1717 uses them."""
1722 uses them."""
1718 if match:
1723 if match:
1719 # The command itself will complain about files that don't exist, so
1724 # The command itself will complain about files that don't exist, so
1720 # don't duplicate the message.
1725 # don't duplicate the message.
1721 match = matchmod.badmatch(match, lambda fn, msg: None)
1726 match = matchmod.badmatch(match, lambda fn, msg: None)
1722 else:
1727 else:
1723 match = matchall(repo)
1728 match = matchall(repo)
1724
1729
1725 fileprefetchhooks(repo, revs, match)
1730 fileprefetchhooks(repo, revs, match)
1726
1731
1727 # a list of (repo, revs, match) prefetch functions
1732 # a list of (repo, revs, match) prefetch functions
1728 fileprefetchhooks = util.hooks()
1733 fileprefetchhooks = util.hooks()
1729
1734
1730 # A marker that tells the evolve extension to suppress its own reporting
1735 # A marker that tells the evolve extension to suppress its own reporting
1731 _reportstroubledchangesets = True
1736 _reportstroubledchangesets = True
1732
1737
1733 def registersummarycallback(repo, otr, txnname=''):
1738 def registersummarycallback(repo, otr, txnname=''):
1734 """register a callback to issue a summary after the transaction is closed
1739 """register a callback to issue a summary after the transaction is closed
1735 """
1740 """
1736 def txmatch(sources):
1741 def txmatch(sources):
1737 return any(txnname.startswith(source) for source in sources)
1742 return any(txnname.startswith(source) for source in sources)
1738
1743
1739 categories = []
1744 categories = []
1740
1745
1741 def reportsummary(func):
1746 def reportsummary(func):
1742 """decorator for report callbacks."""
1747 """decorator for report callbacks."""
1743 # The repoview life cycle is shorter than the one of the actual
1748 # The repoview life cycle is shorter than the one of the actual
1744 # underlying repository. So the filtered object can die before the
1749 # underlying repository. So the filtered object can die before the
1745 # weakref is used leading to troubles. We keep a reference to the
1750 # weakref is used leading to troubles. We keep a reference to the
1746 # unfiltered object and restore the filtering when retrieving the
1751 # unfiltered object and restore the filtering when retrieving the
1747 # repository through the weakref.
1752 # repository through the weakref.
1748 filtername = repo.filtername
1753 filtername = repo.filtername
1749 reporef = weakref.ref(repo.unfiltered())
1754 reporef = weakref.ref(repo.unfiltered())
1750 def wrapped(tr):
1755 def wrapped(tr):
1751 repo = reporef()
1756 repo = reporef()
1752 if filtername:
1757 if filtername:
1753 repo = repo.filtered(filtername)
1758 repo = repo.filtered(filtername)
1754 func(repo, tr)
1759 func(repo, tr)
1755 newcat = '%02i-txnreport' % len(categories)
1760 newcat = '%02i-txnreport' % len(categories)
1756 otr.addpostclose(newcat, wrapped)
1761 otr.addpostclose(newcat, wrapped)
1757 categories.append(newcat)
1762 categories.append(newcat)
1758 return wrapped
1763 return wrapped
1759
1764
1760 if txmatch(_reportobsoletedsource):
1765 if txmatch(_reportobsoletedsource):
1761 @reportsummary
1766 @reportsummary
1762 def reportobsoleted(repo, tr):
1767 def reportobsoleted(repo, tr):
1763 obsoleted = obsutil.getobsoleted(repo, tr)
1768 obsoleted = obsutil.getobsoleted(repo, tr)
1764 if obsoleted:
1769 if obsoleted:
1765 repo.ui.status(_('obsoleted %i changesets\n')
1770 repo.ui.status(_('obsoleted %i changesets\n')
1766 % len(obsoleted))
1771 % len(obsoleted))
1767
1772
1768 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1773 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1769 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1774 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1770 instabilitytypes = [
1775 instabilitytypes = [
1771 ('orphan', 'orphan'),
1776 ('orphan', 'orphan'),
1772 ('phase-divergent', 'phasedivergent'),
1777 ('phase-divergent', 'phasedivergent'),
1773 ('content-divergent', 'contentdivergent'),
1778 ('content-divergent', 'contentdivergent'),
1774 ]
1779 ]
1775
1780
1776 def getinstabilitycounts(repo):
1781 def getinstabilitycounts(repo):
1777 filtered = repo.changelog.filteredrevs
1782 filtered = repo.changelog.filteredrevs
1778 counts = {}
1783 counts = {}
1779 for instability, revset in instabilitytypes:
1784 for instability, revset in instabilitytypes:
1780 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1785 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1781 filtered)
1786 filtered)
1782 return counts
1787 return counts
1783
1788
1784 oldinstabilitycounts = getinstabilitycounts(repo)
1789 oldinstabilitycounts = getinstabilitycounts(repo)
1785 @reportsummary
1790 @reportsummary
1786 def reportnewinstabilities(repo, tr):
1791 def reportnewinstabilities(repo, tr):
1787 newinstabilitycounts = getinstabilitycounts(repo)
1792 newinstabilitycounts = getinstabilitycounts(repo)
1788 for instability, revset in instabilitytypes:
1793 for instability, revset in instabilitytypes:
1789 delta = (newinstabilitycounts[instability] -
1794 delta = (newinstabilitycounts[instability] -
1790 oldinstabilitycounts[instability])
1795 oldinstabilitycounts[instability])
1791 msg = getinstabilitymessage(delta, instability)
1796 msg = getinstabilitymessage(delta, instability)
1792 if msg:
1797 if msg:
1793 repo.ui.warn(msg)
1798 repo.ui.warn(msg)
1794
1799
1795 if txmatch(_reportnewcssource):
1800 if txmatch(_reportnewcssource):
1796 @reportsummary
1801 @reportsummary
1797 def reportnewcs(repo, tr):
1802 def reportnewcs(repo, tr):
1798 """Report the range of new revisions pulled/unbundled."""
1803 """Report the range of new revisions pulled/unbundled."""
1799 origrepolen = tr.changes.get('origrepolen', len(repo))
1804 origrepolen = tr.changes.get('origrepolen', len(repo))
1800 unfi = repo.unfiltered()
1805 unfi = repo.unfiltered()
1801 if origrepolen >= len(unfi):
1806 if origrepolen >= len(unfi):
1802 return
1807 return
1803
1808
1804 # Compute the bounds of new visible revisions' range.
1809 # Compute the bounds of new visible revisions' range.
1805 revs = smartset.spanset(repo, start=origrepolen)
1810 revs = smartset.spanset(repo, start=origrepolen)
1806 if revs:
1811 if revs:
1807 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1812 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1808
1813
1809 if minrev == maxrev:
1814 if minrev == maxrev:
1810 revrange = minrev
1815 revrange = minrev
1811 else:
1816 else:
1812 revrange = '%s:%s' % (minrev, maxrev)
1817 revrange = '%s:%s' % (minrev, maxrev)
1813 draft = len(repo.revs('%ld and draft()', revs))
1818 draft = len(repo.revs('%ld and draft()', revs))
1814 secret = len(repo.revs('%ld and secret()', revs))
1819 secret = len(repo.revs('%ld and secret()', revs))
1815 if not (draft or secret):
1820 if not (draft or secret):
1816 msg = _('new changesets %s\n') % revrange
1821 msg = _('new changesets %s\n') % revrange
1817 elif draft and secret:
1822 elif draft and secret:
1818 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1823 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1819 msg %= (revrange, draft, secret)
1824 msg %= (revrange, draft, secret)
1820 elif draft:
1825 elif draft:
1821 msg = _('new changesets %s (%d drafts)\n')
1826 msg = _('new changesets %s (%d drafts)\n')
1822 msg %= (revrange, draft)
1827 msg %= (revrange, draft)
1823 elif secret:
1828 elif secret:
1824 msg = _('new changesets %s (%d secrets)\n')
1829 msg = _('new changesets %s (%d secrets)\n')
1825 msg %= (revrange, secret)
1830 msg %= (revrange, secret)
1826 else:
1831 else:
1827 errormsg = 'entered unreachable condition'
1832 errormsg = 'entered unreachable condition'
1828 raise error.ProgrammingError(errormsg)
1833 raise error.ProgrammingError(errormsg)
1829 repo.ui.status(msg)
1834 repo.ui.status(msg)
1830
1835
1831 # search new changesets directly pulled as obsolete
1836 # search new changesets directly pulled as obsolete
1832 duplicates = tr.changes.get('revduplicates', ())
1837 duplicates = tr.changes.get('revduplicates', ())
1833 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1838 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1834 origrepolen, duplicates)
1839 origrepolen, duplicates)
1835 cl = repo.changelog
1840 cl = repo.changelog
1836 extinctadded = [r for r in obsadded if r not in cl]
1841 extinctadded = [r for r in obsadded if r not in cl]
1837 if extinctadded:
1842 if extinctadded:
1838 # They are not just obsolete, but obsolete and invisible
1843 # They are not just obsolete, but obsolete and invisible
1839 # we call them "extinct" internally but the terms have not been
1844 # we call them "extinct" internally but the terms have not been
1840 # exposed to users.
1845 # exposed to users.
1841 msg = '(%d other changesets obsolete on arrival)\n'
1846 msg = '(%d other changesets obsolete on arrival)\n'
1842 repo.ui.status(msg % len(extinctadded))
1847 repo.ui.status(msg % len(extinctadded))
1843
1848
1844 @reportsummary
1849 @reportsummary
1845 def reportphasechanges(repo, tr):
1850 def reportphasechanges(repo, tr):
1846 """Report statistics of phase changes for changesets pre-existing
1851 """Report statistics of phase changes for changesets pre-existing
1847 pull/unbundle.
1852 pull/unbundle.
1848 """
1853 """
1849 origrepolen = tr.changes.get('origrepolen', len(repo))
1854 origrepolen = tr.changes.get('origrepolen', len(repo))
1850 phasetracking = tr.changes.get('phases', {})
1855 phasetracking = tr.changes.get('phases', {})
1851 if not phasetracking:
1856 if not phasetracking:
1852 return
1857 return
1853 published = [
1858 published = [
1854 rev for rev, (old, new) in phasetracking.iteritems()
1859 rev for rev, (old, new) in phasetracking.iteritems()
1855 if new == phases.public and rev < origrepolen
1860 if new == phases.public and rev < origrepolen
1856 ]
1861 ]
1857 if not published:
1862 if not published:
1858 return
1863 return
1859 repo.ui.status(_('%d local changesets published\n')
1864 repo.ui.status(_('%d local changesets published\n')
1860 % len(published))
1865 % len(published))
1861
1866
1862 def getinstabilitymessage(delta, instability):
1867 def getinstabilitymessage(delta, instability):
1863 """function to return the message to show warning about new instabilities
1868 """function to return the message to show warning about new instabilities
1864
1869
1865 exists as a separate function so that extensions can wrap it to show more
1870 exists as a separate function so that extensions can wrap it to show more
1866 information like how to fix instabilities"""
1871 information like how to fix instabilities"""
1867 if delta > 0:
1872 if delta > 0:
1868 return _('%i new %s changesets\n') % (delta, instability)
1873 return _('%i new %s changesets\n') % (delta, instability)
1869
1874
1870 def nodesummaries(repo, nodes, maxnumnodes=4):
1875 def nodesummaries(repo, nodes, maxnumnodes=4):
1871 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1876 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1872 return ' '.join(short(h) for h in nodes)
1877 return ' '.join(short(h) for h in nodes)
1873 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1878 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1874 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1879 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1875
1880
1876 def enforcesinglehead(repo, tr, desc):
1881 def enforcesinglehead(repo, tr, desc):
1877 """check that no named branch has multiple heads"""
1882 """check that no named branch has multiple heads"""
1878 if desc in ('strip', 'repair'):
1883 if desc in ('strip', 'repair'):
1879 # skip the logic during strip
1884 # skip the logic during strip
1880 return
1885 return
1881 visible = repo.filtered('visible')
1886 visible = repo.filtered('visible')
1882 # possible improvement: we could restrict the check to affected branch
1887 # possible improvement: we could restrict the check to affected branch
1883 for name, heads in visible.branchmap().iteritems():
1888 for name, heads in visible.branchmap().iteritems():
1884 if len(heads) > 1:
1889 if len(heads) > 1:
1885 msg = _('rejecting multiple heads on branch "%s"')
1890 msg = _('rejecting multiple heads on branch "%s"')
1886 msg %= name
1891 msg %= name
1887 hint = _('%d heads: %s')
1892 hint = _('%d heads: %s')
1888 hint %= (len(heads), nodesummaries(repo, heads))
1893 hint %= (len(heads), nodesummaries(repo, heads))
1889 raise error.Abort(msg, hint=hint)
1894 raise error.Abort(msg, hint=hint)
1890
1895
1891 def wrapconvertsink(sink):
1896 def wrapconvertsink(sink):
1892 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1897 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1893 before it is used, whether or not the convert extension was formally loaded.
1898 before it is used, whether or not the convert extension was formally loaded.
1894 """
1899 """
1895 return sink
1900 return sink
1896
1901
1897 def unhidehashlikerevs(repo, specs, hiddentype):
1902 def unhidehashlikerevs(repo, specs, hiddentype):
1898 """parse the user specs and unhide changesets whose hash or revision number
1903 """parse the user specs and unhide changesets whose hash or revision number
1899 is passed.
1904 is passed.
1900
1905
1901 hiddentype can be: 1) 'warn': warn while unhiding changesets
1906 hiddentype can be: 1) 'warn': warn while unhiding changesets
1902 2) 'nowarn': don't warn while unhiding changesets
1907 2) 'nowarn': don't warn while unhiding changesets
1903
1908
1904 returns a repo object with the required changesets unhidden
1909 returns a repo object with the required changesets unhidden
1905 """
1910 """
1906 if not repo.filtername or not repo.ui.configbool('experimental',
1911 if not repo.filtername or not repo.ui.configbool('experimental',
1907 'directaccess'):
1912 'directaccess'):
1908 return repo
1913 return repo
1909
1914
1910 if repo.filtername not in ('visible', 'visible-hidden'):
1915 if repo.filtername not in ('visible', 'visible-hidden'):
1911 return repo
1916 return repo
1912
1917
1913 symbols = set()
1918 symbols = set()
1914 for spec in specs:
1919 for spec in specs:
1915 try:
1920 try:
1916 tree = revsetlang.parse(spec)
1921 tree = revsetlang.parse(spec)
1917 except error.ParseError: # will be reported by scmutil.revrange()
1922 except error.ParseError: # will be reported by scmutil.revrange()
1918 continue
1923 continue
1919
1924
1920 symbols.update(revsetlang.gethashlikesymbols(tree))
1925 symbols.update(revsetlang.gethashlikesymbols(tree))
1921
1926
1922 if not symbols:
1927 if not symbols:
1923 return repo
1928 return repo
1924
1929
1925 revs = _getrevsfromsymbols(repo, symbols)
1930 revs = _getrevsfromsymbols(repo, symbols)
1926
1931
1927 if not revs:
1932 if not revs:
1928 return repo
1933 return repo
1929
1934
1930 if hiddentype == 'warn':
1935 if hiddentype == 'warn':
1931 unfi = repo.unfiltered()
1936 unfi = repo.unfiltered()
1932 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1937 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1933 repo.ui.warn(_("warning: accessing hidden changesets for write "
1938 repo.ui.warn(_("warning: accessing hidden changesets for write "
1934 "operation: %s\n") % revstr)
1939 "operation: %s\n") % revstr)
1935
1940
1936 # we have to use new filtername to separate branch/tags cache until we can
1941 # we have to use new filtername to separate branch/tags cache until we can
1937 # disable these caches when revisions are dynamically pinned.
1942 # disable these caches when revisions are dynamically pinned.
1938 return repo.filtered('visible-hidden', revs)
1943 return repo.filtered('visible-hidden', revs)
1939
1944
1940 def _getrevsfromsymbols(repo, symbols):
1945 def _getrevsfromsymbols(repo, symbols):
1941 """parse the list of symbols and returns a set of revision numbers of hidden
1946 """parse the list of symbols and returns a set of revision numbers of hidden
1942 changesets present in symbols"""
1947 changesets present in symbols"""
1943 revs = set()
1948 revs = set()
1944 unfi = repo.unfiltered()
1949 unfi = repo.unfiltered()
1945 unficl = unfi.changelog
1950 unficl = unfi.changelog
1946 cl = repo.changelog
1951 cl = repo.changelog
1947 tiprev = len(unficl)
1952 tiprev = len(unficl)
1948 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1953 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1949 for s in symbols:
1954 for s in symbols:
1950 try:
1955 try:
1951 n = int(s)
1956 n = int(s)
1952 if n <= tiprev:
1957 if n <= tiprev:
1953 if not allowrevnums:
1958 if not allowrevnums:
1954 continue
1959 continue
1955 else:
1960 else:
1956 if n not in cl:
1961 if n not in cl:
1957 revs.add(n)
1962 revs.add(n)
1958 continue
1963 continue
1959 except ValueError:
1964 except ValueError:
1960 pass
1965 pass
1961
1966
1962 try:
1967 try:
1963 s = resolvehexnodeidprefix(unfi, s)
1968 s = resolvehexnodeidprefix(unfi, s)
1964 except (error.LookupError, error.WdirUnsupported):
1969 except (error.LookupError, error.WdirUnsupported):
1965 s = None
1970 s = None
1966
1971
1967 if s is not None:
1972 if s is not None:
1968 rev = unficl.rev(s)
1973 rev = unficl.rev(s)
1969 if rev not in cl:
1974 if rev not in cl:
1970 revs.add(rev)
1975 revs.add(rev)
1971
1976
1972 return revs
1977 return revs
1973
1978
1974 def bookmarkrevs(repo, mark):
1979 def bookmarkrevs(repo, mark):
1975 """
1980 """
1976 Select revisions reachable by a given bookmark
1981 Select revisions reachable by a given bookmark
1977 """
1982 """
1978 return repo.revs("ancestors(bookmark(%s)) - "
1983 return repo.revs("ancestors(bookmark(%s)) - "
1979 "ancestors(head() and not bookmark(%s)) - "
1984 "ancestors(head() and not bookmark(%s)) - "
1980 "ancestors(bookmark() and not bookmark(%s))",
1985 "ancestors(bookmark() and not bookmark(%s))",
1981 mark, mark, mark)
1986 mark, mark, mark)
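As a side note on the prefetch hook list defined earlier in this listing (fileprefetchhooks = util.hooks()): the snippet below is a hypothetical extension sketch, not part of this changeset. It assumes util.hooks() exposes an add(source, hook) registration method, and names such as _prefetch and 'myext' are illustrative. It shows how an extension could arrange for prefetchfiles() to make file contents available before a command needs them.

    from mercurial import scmutil

    def _prefetch(repo, revs, match):
        # walk the matched files of each revision and read their data so the
        # contents are available locally before the calling command uses them
        for rev in revs:
            ctx = repo[rev]
            for path in ctx.walk(match):
                ctx[path].data()

    def extsetup(ui):
        # register with the (repo, revs, match) hook list that prefetchfiles()
        # invokes
        scmutil.fileprefetchhooks.add('myext', _prefetch)

In a purely local clone every file is already present, so such a hook is effectively a no-op; a network-backed storage extension would presumably batch the fetches rather than read files one by one.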
@@ -1,115 +1,120 b''
1 $ hg init repo
1 $ hg init repo
2 $ cd repo
2 $ cd repo
3 $ for n in 0 1 2 3 4 5 6 7 8 9 10 11; do
3 $ for n in 0 1 2 3 4 5 6 7 8 9 10 11; do
4 > echo $n > $n
4 > echo $n > $n
5 > hg ci -qAm $n
5 > hg ci -qAm $n
6 > done
6 > done
7
7
8 test revset support
8 test revset support
9
9
10 $ cat <<'EOF' >> .hg/hgrc
10 $ cat <<'EOF' >> .hg/hgrc
11 > [extdata]
11 > [extdata]
12 > filedata = file:extdata.txt
12 > filedata = file:extdata.txt
13 > notes = notes.txt
13 > notes = notes.txt
14 > shelldata = shell:cat extdata.txt | grep 2
14 > shelldata = shell:cat extdata.txt | grep 2
15 > emptygrep = shell:cat extdata.txt | grep empty
15 > emptygrep = shell:cat extdata.txt | grep empty
16 > badparse = shell:cat badparse.txt
16 > badparse = shell:cat badparse.txt
17 > EOF
17 > EOF
18 $ cat <<'EOF' > extdata.txt
18 $ cat <<'EOF' > extdata.txt
19 > 2 another comment on 2
19 > 2 another comment on 2
20 > 3
20 > 3
21 > EOF
21 > EOF
22 $ cat <<'EOF' > notes.txt
22 $ cat <<'EOF' > notes.txt
23 > f6ed this change is great!
23 > f6ed this change is great!
24 > e834 this is buggy :(
24 > e834 this is buggy :(
25 > 0625 first post
25 > 0625 first post
26 > bogusnode gives no error
26 > bogusnode gives no error
27 > a ambiguous node gives no error
27 > a ambiguous node gives no error
28 > EOF
28 > EOF
29
29
30 $ hg log -qr "extdata(filedata)"
30 $ hg log -qr "extdata(filedata)"
31 2:f6ed99a58333
31 2:f6ed99a58333
32 3:9de260b1e88e
32 3:9de260b1e88e
33 $ hg log -qr "extdata(shelldata)"
33 $ hg log -qr "extdata(shelldata)"
34 2:f6ed99a58333
34 2:f6ed99a58333
35
35
36 test weight of extdata() revset
36 test weight of extdata() revset
37
37
38 $ hg debugrevspec -p optimized "extdata(filedata) & 3"
38 $ hg debugrevspec -p optimized "extdata(filedata) & 3"
39 * optimized:
39 * optimized:
40 (andsmally
40 (andsmally
41 (func
41 (func
42 (symbol 'extdata')
42 (symbol 'extdata')
43 (symbol 'filedata'))
43 (symbol 'filedata'))
44 (symbol '3'))
44 (symbol '3'))
45 3
45 3
46
46
47 test non-zero exit of shell command
47 test non-zero exit of shell command
48
48
49 $ hg log -qr "extdata(emptygrep)"
49 $ hg log -qr "extdata(emptygrep)"
50 abort: extdata command 'cat extdata.txt | grep empty' failed: exited with status 1
50 abort: extdata command 'cat extdata.txt | grep empty' failed: exited with status 1
51 [255]
51 [255]
52
52
53 test bad extdata() revset source
53 test bad extdata() revset source
54
54
55 $ hg log -qr "extdata()"
55 $ hg log -qr "extdata()"
56 hg: parse error: extdata takes at least 1 string argument
56 hg: parse error: extdata takes at least 1 string argument
57 [255]
57 [255]
58 $ hg log -qr "extdata(unknown)"
58 $ hg log -qr "extdata(unknown)"
59 abort: unknown extdata source 'unknown'
59 abort: unknown extdata source 'unknown'
60 [255]
60 [255]
61
61
62 test a zero-exiting source that emits garbage to confuse the revset parser
62 test a zero-exiting source that emits garbage to confuse the revset parser
63
63
64 $ cat > badparse.txt <<'EOF'
64 $ cat > badparse.txt <<'EOF'
65 > +---------------------------------------+
65 > +---------------------------------------+
66 > 9de260b1e88e
66 > 9de260b1e88e
67 > EOF
67 > EOF
68
68
69 BUG: this should print the revset parse error
70 $ hg log -qr "extdata(badparse)" 2>&1 | grep ValueError
71 ValueError: Mixing iteration and read methods would lose data
69 It might be nice if this error message mentioned where the bad string
70 came from (eg line X of extdata source S), but the important thing is
71 that we don't crash before we can print the parse error.
72 $ hg log -qr "extdata(badparse)"
73 hg: parse error at 0: not a prefix: +
74 (+---------------------------------------+
75 ^ here)
76 [255]
72
77
73 test template support:
78 test template support:
74
79
75 $ hg log -r:3 -T "{node|short}{if(extdata('notes'), ' # {extdata('notes')}')}\n"
80 $ hg log -r:3 -T "{node|short}{if(extdata('notes'), ' # {extdata('notes')}')}\n"
76 06254b906311 # first post
81 06254b906311 # first post
77 e8342c9a2ed1 # this is buggy :(
82 e8342c9a2ed1 # this is buggy :(
78 f6ed99a58333 # this change is great!
83 f6ed99a58333 # this change is great!
79 9de260b1e88e
84 9de260b1e88e
80
85
81 test template cache:
86 test template cache:
82
87
83 $ hg log -r:3 -T '{rev} "{extdata("notes")}" "{extdata("shelldata")}"\n'
88 $ hg log -r:3 -T '{rev} "{extdata("notes")}" "{extdata("shelldata")}"\n'
84 0 "first post" ""
89 0 "first post" ""
85 1 "this is buggy :(" ""
90 1 "this is buggy :(" ""
86 2 "this change is great!" "another comment on 2"
91 2 "this change is great!" "another comment on 2"
87 3 "" ""
92 3 "" ""
88
93
89 test bad extdata() template source
94 test bad extdata() template source
90
95
91 $ hg log -T "{extdata()}\n"
96 $ hg log -T "{extdata()}\n"
92 hg: parse error: extdata expects one argument
97 hg: parse error: extdata expects one argument
93 [255]
98 [255]
94 $ hg log -T "{extdata('unknown')}\n"
99 $ hg log -T "{extdata('unknown')}\n"
95 abort: unknown extdata source 'unknown'
100 abort: unknown extdata source 'unknown'
96 [255]
101 [255]
97 $ hg log -T "{extdata(unknown)}\n"
102 $ hg log -T "{extdata(unknown)}\n"
98 hg: parse error: empty data source specified
103 hg: parse error: empty data source specified
99 (did you mean extdata('unknown')?)
104 (did you mean extdata('unknown')?)
100 [255]
105 [255]
101 $ hg log -T "{extdata('{unknown}')}\n"
106 $ hg log -T "{extdata('{unknown}')}\n"
102 hg: parse error: empty data source specified
107 hg: parse error: empty data source specified
103 [255]
108 [255]
104
109
105 we don't fix up relative file URLs, but we do run shell commands in repo root
110 we don't fix up relative file URLs, but we do run shell commands in repo root
106
111
107 $ mkdir sub
112 $ mkdir sub
108 $ cd sub
113 $ cd sub
109 $ hg log -qr "extdata(filedata)"
114 $ hg log -qr "extdata(filedata)"
110 abort: error: $ENOENT$
115 abort: error: $ENOENT$
111 [255]
116 [255]
112 $ hg log -qr "extdata(shelldata)"
117 $ hg log -qr "extdata(shelldata)"
113 2:f6ed99a58333
118 2:f6ed99a58333
114
119
115 $ cd ..
120 $ cd ..
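The removed output above ("ValueError: Mixing iteration and read methods would lose data") is the Python 2 file-object error raised when code iterates over a pipe and later calls read()/readline() on the same object. The sketch below is hypothetical and is not the code this commit changes; it only illustrates the safer pattern the new test expects, in which the extdata pipe is consumed with a single access style so a revset parse error can be raised and printed normally instead of crashing first.

    import subprocess

    # stand-in for an [extdata] shell: source; the real command comes from hgrc
    proc = subprocess.Popen('printf "2 another comment on 2\\n3\\n"',
                            shell=True, stdout=subprocess.PIPE)
    try:
        # readline() only -- never mix the file iterator with read()/readline()
        for line in iter(proc.stdout.readline, b''):
            fields = line.split(None, 1)  # "<node-or-rev> [comment ...]"
            if fields:
                # hand fields[0] to the revset layer; a parse error there can
                # now surface as a normal abort message
                print(fields[0])
    finally:
        proc.stdout.close()
        proc.wait()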