scmutil: document matcher argument of movedirstate()...
Martin von Zweigbergk - r42104:ad4a3e2e default
@@ -1,1942 +1,1947 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import posixpath
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    copies as copiesmod,
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

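# Example (illustrative sketch, not part of the upstream file): a status
# tuple is normally obtained from repo.status() and consumed by property
# name rather than by index. Assumes `repo` is a localrepository and `ui`
# its ui object:
#
#   st = repo.status(unknown=True)
#   for f in st.modified:
#       ui.write(b'M %s\n' % f)
#   for f in st.unknown:
#       ui.write(b'? %s\n' % f)
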
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull. excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code

    return -1

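# Example (illustrative sketch): dispatch-like callers wrap their entry
# point with callcatch() so that a raised Mercurial error becomes a printed
# message plus an exit code. Assumes `ui` is a ui object:
#
#   def _run():
#       # ... command body that may raise error.Abort and friends ...
#       return 0
#
#   ret = callcatch(ui, _run)
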
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

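# Example (illustrative sketch): commands that add files typically create
# one auditor and call it once per filename; the 'README' below would
# trigger a warning if a file named 'readme' were already tracked. Assumes
# `repo` is a localrepository:
#
#   audit = casecollisionauditor(repo.ui, False, repo.dirstate)
#   for f in [b'README', b'new.txt']:
#       audit(f)
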
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

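# Example (illustrative sketch): a cache file can record this digest next
# to its tiprev/tipnode key and consider itself valid only while all three
# still match. Assumes `repo` is a repoview-filtered repository:
#
#   key = filteredhash(repo, len(repo.changelog) - 1)
#   # compare `key` against the digest stored when the cache was written;
#   # a mismatch means the set of filtered revisions has changed.
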
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

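# Example (illustrative sketch): enumerate repositories below the current
# directory, following symlinks and descending into working copies. Assumes
# `ui` is a ui object:
#
#   for repopath in walkrepos(b'.', followsym=True, recurse=True):
#       ui.write(b'%s\n' % repopath)
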
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

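# Example (illustrative sketch): at normal verbosity this produces the
# familiar '{rev}:{shortnode}' form with a 12-digit node prefix, while
# ui.debugflag (--debug) switches to the full 40-digit hash:
#
#   formatrevnode(ui, 0, repo[0].node())  # -> '0:' followed by 12 hex digits
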
def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

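# Examples (illustrative, assuming a repository with 100 revisions):
#   mayberevnum(repo, b'42')   -> True   (plausible revnum)
#   mayberevnum(repo, b'0')    -> True   ('0' is a valid revnum)
#   mayberevnum(repo, b'042')  -> False  (leading zero is never a revnum)
#   mayberevnum(repo, b'999')  -> False  (larger than the tip rev)
#   mayberevnum(repo, b'cafe') -> False  (not an integer at all)
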
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. So we look for hash collisions in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

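# Example (illustrative sketch): resolving user-supplied symbols to
# contexts. Assumes `repo` is a localrepository with a bookmark named
# 'my-bookmark':
#
#   ctx = revsymbol(repo, b'.')            # working directory parent
#   ctx = revsymbol(repo, b'my-bookmark')  # via the name interface
#   ctx = revsymbol(repo, b'1234')         # revnum, if it exists
#   revsymbol(repo, b'max(public())')      # raises RepoLookupError; full
#                                          # revsets belong in revrange()
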
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

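# Example (illustrative sketch): executing user-level revsets. Each spec is
# a pre-formatted revset string (or a bare integer), and the results are
# OR-ed together into one smartset:
#
#   revs = revrange(repo, [b'draft()', b'heads(default)'])
#   for rev in revs:
#       ui.write(b'%d\n' % rev)
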
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    elif repo.ui.configbool('ui', 'slash'):
        return lambda f: f
    else:
        return util.localpath

def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))

def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    return bool(pats or opts.get('include') or opts.get('exclude'))

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)

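# Example (illustrative sketch): building a matcher from command-style
# patterns and walking a context with it. Assumes `repo[None]` is the
# working context and `ui` is a ui object:
#
#   m = match(repo[None], pats=[b'glob:*.py'])
#   for f in repo[None].walk(m):
#       ui.write(b'%s\n' % f)
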
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)

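# Example (illustrative sketch): with the configuration
#
#   [ui]
#   origbackuppath = .hg/origbackups
#
# backuppath(ui, repo, b'dir/file.txt') returns an absolute path inside
# .hg/origbackups, while without the setting it falls back to
# <working copy>/dir/file.txt.orig.
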
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

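# Illustrative sketch (not part of the original module): revsets support
# containment checks on integer revs only, so wrapping one in _containsnode
# lets callers test membership with binary node ids instead.
def _exampleispublic(repo, node):
    publicnodes = _containsnode(repo, repo.revs('public()'))
    return node in publicnodes
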
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair # avoid import cycle
                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(repo, allreplaced, allreplaced, node,
                                    operation)
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

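# Illustrative sketch (not part of the original module): a history-rewriting
# command would call cleanupnodes() once per rewrite with a mapping from
# each rewritten node to its successors. 'old' and 'new' are assumed to be
# binary changeset ids computed elsewhere; an empty successor list would
# mean "pruned without a successor".
def _examplerewrite(repo, old, new):
    cleanupnodes(repo, {old: [new]}, operation='myrewrite')
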
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % uipathfn(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % uipathfn(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % uipathfn(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                    or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (uipathfn(old), uipathfn(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def getrenamedfn(repo, endrev=None):
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed

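# Illustrative sketch (not part of the original module): build the lookup
# function once, then query it for every (file, rev) pair while walking a
# revision range, so the per-filelog cache is reused across calls.
def _examplelogrenames(repo):
    getrenamed = getrenamedfn(repo)
    for rev in repo.revs('all()'):
        for fn in repo[rev].files():
            source = getrenamed(fn, rev)
            if source:
                repo.ui.write('%s came from %s in %d\n' % (fn, source, rev))
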
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo['.']
    ds = repo.dirstate
    ds.setparents(newctx.node(), nullid)
    copies = dict(ds.copies())
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == 'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == 'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != 'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == 'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != 'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = dict((dst, oldcopies.get(src, src))
                  for dst, src in oldcopies.iteritems())
    # Adjust the dirstate copies
    for dst, src in copies.iteritems():
        if (src not in newctx or dst in newctx or ds[dst] != 'a'):
            src = None
        ds.copy(src, dst)

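# Illustrative sketch (not part of the original module; the wlock and
# parentchange() wrappers are assumptions about the caller's duties): an
# "uncommit"-style operation might move the dirstate to the grandparent
# while keeping the undone changes pending in the working copy.
def _exampleuncommit(repo):
    with repo.wlock(), repo.dirstate.parentchange():
        movedirstate(repo, repo['.^'])
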
def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

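# Illustrative sketch (not part of the original module): a subclass supplies
# join() so tracked file names are resolved against the decorated object, as
# the docstring above prescribes; 'svfs' is an assumption standing in for
# whatever vfs the owning class provides.
class _storecache(filecache):
    def join(self, obj, fname):
        # resolve tracked file names relative to the object's store vfs
        return obj.svfs.join(fname)
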
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

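# Illustrative configuration (assumed values, not from the original module):
#
#   [extdata]
#   bugrefs = shell:cat .hg/bugrefs
#
# With that in place, extdatasource(repo, 'bugrefs') parses output lines of
# the form "<revspec> <freeform value>" into a {rev: value} mapping,
# silently skipping revisions unknown to the local repository.
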
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

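# Illustrative sketch (not part of the original module): the wlock must
# already be held; the child process can then reacquire it through the
# HG_WLOCK_LOCKER environment variable set by _locksub. The command string
# is made up.
def _examplewlocksub(repo):
    with repo.wlock():
        return wlocksub(repo, 'hg debuglocks')
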
class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # initialize so the debug output below can't hit an undefined name
        # when no unit was configured
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

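# Illustrative sketch (not part of the original module): progress objects
# are context managers, so complete() runs even on error. The 'updatebar'
# callback is assumed to be provided by the ui layer.
def _exampleprogress(ui, updatebar, items):
    with progress(ui, updatebar, 'processing', unit='items',
                  total=len(items)) as prog:
        for item in items:
            prog.increment(item=item)
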
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

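# Illustrative sketch (not part of the original module): a round trip
# through a state file; repo.vfs and the 'mystate' file name are assumed.
def _examplestatefile(repo):
    skvf = simplekeyvaluefile(repo.vfs, 'mystate')
    skvf.write({'version': '1', 'inprogress': 'yes'}, firstline='mycommand')
    data = skvf.read(firstlinenonkeyval=True)
    # data == {'__firstline': 'mycommand', 'version': '1',
    #          'inprogress': 'yes'}
    return data
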
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

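# Illustrative sketch (not part of the original module): an extension
# registers its prefetch function so prefetchfiles() invokes it before
# commands touch file contents. The function body is a placeholder.
def _exampleprefetch(repo, revs, match):
    # fetch the matched files for 'revs' from a remote store here
    pass

fileprefetchhooks.add('myextension', _exampleprefetch)
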
1694 def registersummarycallback(repo, otr, txnname=''):
1699 def registersummarycallback(repo, otr, txnname=''):
1695 """register a callback to issue a summary after the transaction is closed
1700 """register a callback to issue a summary after the transaction is closed
1696 """
1701 """
1697 def txmatch(sources):
1702 def txmatch(sources):
1698 return any(txnname.startswith(source) for source in sources)
1703 return any(txnname.startswith(source) for source in sources)
1699
1704
1700 categories = []
1705 categories = []
1701
1706
1702 def reportsummary(func):
1707 def reportsummary(func):
1703 """decorator for report callbacks."""
1708 """decorator for report callbacks."""
1704 # The repoview life cycle is shorter than the one of the actual
1709 # The repoview life cycle is shorter than the one of the actual
1705 # underlying repository. So the filtered object can die before the
1710 # underlying repository. So the filtered object can die before the
1706 # weakref is used leading to troubles. We keep a reference to the
1711 # weakref is used leading to troubles. We keep a reference to the
1707 # unfiltered object and restore the filtering when retrieving the
1712 # unfiltered object and restore the filtering when retrieving the
1708 # repository through the weakref.
1713 # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally, but the term has not
                # been exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

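# Example (illustrative): callers register the summary callbacks right
# after opening a transaction so the report is issued when it closes; the
# transaction name 'pull' is an assumption of this sketch.
#
#   tr = repo.transaction('pull')
#   registersummarycallback(repo, tr, txnname='pull')
#   # ... pull in changesets ...
#   tr.close()
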
def getinstabilitymessage(delta, instability):
    """return the message to use when warning about new instabilities

    exists as a separate function so that extensions can wrap it to show
    more information, such as how to fix the instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

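# For instance (values are illustrative, the format string is the one
# above): getinstabilitymessage(2, 'orphan') returns
# '2 new orphan changesets\n', while a zero or negative delta returns None
# and the caller prints nothing.
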
def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

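# Example (illustrative): given six node ids and the default maxnumnodes=4,
# this returns the first four short (12 hex digit) hashes followed by
# "and 2 others"; in verbose mode, all six hashes are listed instead.
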
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

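# Sketch (assumptions flagged): a repository that wants to refuse
# multi-headed branches can run this check while a transaction is being
# validated; wiring it through `tr.addvalidator` as below is an assumption
# of this sketch, not something this function does by itself.
#
#   def validate(tr2):
#       enforcesinglehead(repo, tr2, desc)
#   tr.addvalidator('000-single-head', validate)
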
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

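# Example (illustrative): an extension could intercept the sink through the
# standard extensions.wrapfunction mechanism; `mysink` is a hypothetical
# wrapper class defined by the extension.
#
#   from mercurial import extensions, scmutil
#
#   def wrapsink(orig, sink):
#       return mysink(orig(sink))
#
#   extensions.wrapfunction(scmutil, 'wrapconvertsink', wrapsink)
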
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate the branch/tags caches
    # until we can disable these caches when revisions are dynamically
    # pinned.
    return repo.filtered('visible-hidden', revs)

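# Example (illustrative): a command that writes to a hidden changeset can
# unhide it before resolving user-supplied revisions; the hash below is a
# made-up placeholder, and revrange() is the resolver defined earlier in
# this module.
#
#   repo = unhidehashlikerevs(repo, ['deadbeefcafe'], 'warn')
#   revs = revrange(repo, ['deadbeefcafe'])
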
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                        continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

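# For instance (illustrative): with experimental.directaccess.revnums
# enabled, the symbol '5' adds revision 5 to the result if that revision is
# hidden, and a hex hash prefix is resolved against the unfiltered
# changelog, contributing its revision number only when it is not visible.
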
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
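
# Example (illustrative): selecting the changesets "owned" by a bookmark,
# e.g. to display them; the bookmark name '@' is an assumption of this
# sketch.
#
#   revs = bookmarkrevs(repo, '@')
#   for rev in revs:
#       repo.ui.write('%d\n' % rev)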