scmutil: fix a comment that doesn't match the code...
Martin von Zweigbergk
r41846:e21183db default
@@ -1,1860 +1,1860 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import posixpath
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

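# Editorial usage sketch (not part of scmutil.py): the status tuple unpacks
# both positionally and by property. The file lists below are hypothetical.
#
#   from mercurial import scmutil
#   st = scmutil.status([b'a.txt'], [b'b.txt'], [], [], [], [], [])
#   assert st.modified == st[0] == [b'a.txt']
#   modified, added = st[:2]
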
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code

    return -1

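# Editorial usage sketch (not part of scmutil.py): callcatch() wraps an
# arbitrary callable and converts the known exceptions above into exit
# codes; the main() body below is hypothetical.
#
#   from mercurial import ui as uimod, scmutil
#   def main():
#       ...                     # anything that may raise error.Abort etc.
#       return 0
#   exitcode = scmutil.callcatch(uimod.ui.load(), main)
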
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

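# Editorial usage sketch (not part of scmutil.py): the auditor is fed each
# new filename and warns (or aborts) on case-folding collisions; 'repo' is
# assumed to be an existing localrepository object.
#
#   audit = casecollisionauditor(repo.ui, abort=False,
#                                dirstate=repo.dirstate)
#   audit(b'README')
#   audit(b'readme')   # would warn if README is already tracked
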
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

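# Editorial sketch (not part of scmutil.py): the cache key is just a SHA-1
# over the decimal revision numbers joined by ';'. Standalone equivalent,
# with a hypothetical set of filtered revisions:
#
#   import hashlib
#   filtered = [2, 5, 7]
#   s = hashlib.sha1()
#   for rev in filtered:
#       s.update(b'%d;' % rev)
#   key = s.digest()   # 20-byte digest validated alongside tiprev/tipnode
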
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

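# Editorial usage sketch (not part of scmutil.py): enumerate repositories
# under a hypothetical directory, following symlinks:
#
#   for repopath in walkrepos(b'/srv/repos', followsym=True):
#       print(repopath)
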
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

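# Editorial sketch (not part of scmutil.py): worked examples of the rule
# above, assuming a repository with 100 revisions (len(repo) == 100):
#
#   mayberevnum(repo, b'42')    -> True   (a plain int below the tip rev)
#   mayberevnum(repo, b'0')     -> True   (0 is a valid revnum)
#   mayberevnum(repo, b'042')   -> False  (leading zero is never a revnum)
#   mayberevnum(repo, b'12345') -> False  (larger than the tip rev)
#   mayberevnum(repo, b'cafe')  -> False  (not an integer at all)
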
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

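# Editorial usage sketch (not part of scmutil.py): shortening a node and
# reusing a cache dict across calls; 'repo' and 'node' are assumed to exist.
#
#   cache = {}
#   short1 = shortesthexnodeidprefix(repo, node, minlength=4, cache=cache)
#   short2 = shortesthexnodeidprefix(repo, node, cache=cache)  # cache reused
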
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

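# Editorial usage sketch (not part of scmutil.py): resolving symbols to
# changectx objects; the repository path is hypothetical.
#
#   from mercurial import ui as uimod, hg, scmutil
#   repo = hg.repository(uimod.ui.load(), b'/tmp/repo')
#   ctx = scmutil.revsymbol(repo, b'tip')
#   if scmutil.isrevsymbol(repo, b'my-bookmark'):
#       ctx = scmutil.revsymbol(repo, b'my-bookmark')
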
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

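# Editorial usage sketch (not part of scmutil.py): combining revsets and
# pre-formatting user arguments, per the docstring above; 'repo' and the
# bookmark name are assumed.
#
#   from mercurial import revsetlang
#   spec = revsetlang.formatspec(b'ancestors(%s)', b'my-bookmark')
#   revs = revrange(repo, [spec, b'tip'])   # union of both revsets
#   for rev in revs:
#       print(rev)
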
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
730 """Return a function that produced paths for presenting to the user.
730 """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    elif repo.ui.configbool('ui', 'slash'):
        return lambda f: f
    else:
        return util.localpath

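# Editorial usage sketch (not part of scmutil.py): producing user-facing
# paths from repo-relative ones; 'repo' is assumed to exist.
#
#   uipathfn = getuipathfn(repo, legacyrelativevalue=True)
#   repo.ui.write(b'%s\n' % uipathfn(b'dir/file.txt'))
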
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))

def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    return bool(pats or opts.get('include') or opts.get('exclude'))

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)

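# Editorial sketch (not part of scmutil.py): backuppath() honors this hgrc
# setting; the directory name below is hypothetical.
#
#   [ui]
#   origbackuppath = .hg/origbackups
#
# With it set, foo/bar.txt backs up under <origbackuppath>/foo/bar.txt
# instead of foo/bar.txt.orig in the working copy.
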
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

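# Illustrative sketch: repo.revs() returns a container of integer revision
# numbers, but callers such as the bookmark logic in cleanupnodes() below
# need to test membership by node; _containsnode adapts one to the other.
def _containsnode_example(repo, node):
    draftrevs = repo.revs('draft()')             # contains revision numbers
    draftnodes = _containsnode(repo, draftrevs)  # 'node in draftnodes' works
    return node in draftnodes
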
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

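# Illustrative sketch: a history-rewriting command would typically call
# cleanupnodes() with a mapping from each rewritten node to its
# replacement(s); mapping a node to an empty list marks it as pruned. The
# operation name below is hypothetical.
def _cleanupnodes_example(repo, oldnode, newnode, prunednode):
    replacements = {oldnode: [newnode], prunednode: []}
    cleanupnodes(repo, replacements, operation='example-rewrite')
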
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % uipathfn(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % uipathfn(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % uipathfn(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (uipathfn(old), uipathfn(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons, it might not end up with dst marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used directly, as it has been
    set in the instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

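# Illustrative sketch (not part of the module): filecache is used by
# subclassing it with a join() implementation and decorating a method on a
# class that maintains a ``_filecache`` dict. The names below are
# hypothetical, loosely modeled on how repository objects use this decorator.
class _vfsfilecache(filecache):
    """resolve cached paths through the decorated object's 'vfs'"""
    def join(self, obj, fname):
        return obj.vfs.join(fname)

# On a class with 'vfs' and '_filecache' attributes one could then write:
#
#     @_vfsfilecache('bookmarks')
#     def _bookmarks(self):
#         return _parsebookmarks(self)   # hypothetical parser
#
# The first access stats '.hg/bookmarks' and caches the parsed result; after
# the attribute is invalidated with delattr(), the next access recomputes
# only if the stat information changed.
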
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

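# Illustrative sketch: a (hypothetical) configuration
#
#     [extdata]
#     notes = shell:cat .hg/notes-data
#
# where every output line looks like "<revspec> <free text>", can be
# queried as:
def _extdatasource_example(repo):
    return extdatasource(repo, 'notes')  # -> {rev: 'free text', ...}
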
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

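# Illustrative sketch: while holding the wlock, run a (hypothetical) helper
# command that itself needs the working copy; the lock is passed down via
# the HG_WLOCK_LOCKER environment variable set by _locksub().
def _wlocksub_example(repo):
    with repo.wlock():
        return wlocksub(repo, 'some-helper-command')
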
class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # initialize 'unit' unconditionally so the formatting below cannot
        # raise NameError when self.unit is empty
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

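# Illustrative sketch: progress instances are context managers, so __exit__
# calls complete() even if the loop raises. ui.makeprogress() is the usual
# way to obtain one in this era of the codebase; the topic and loop body
# here are hypothetical.
def _progress_example(ui, items):
    with ui.makeprogress(_('processing'), unit=_('items'),
                         total=len(items)) as prog:
        for item in items:
            prog.increment(item=item)
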
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file

        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

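# Illustrative sketch: round-tripping a state file; the vfs and file name
# are hypothetical.
def _simplekeyvaluefile_example(vfs):
    f = simplekeyvaluefile(vfs, 'examplestate')
    f.write({'version': '1', 'state': 'pending'}, firstline='v1')
    return f.read(firstlinenonkeyval=True)
    # -> {'__firstline': 'v1', 'version': '1', 'state': 'pending'}
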
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

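# Illustrative sketch: an extension registers a prefetch function (typically
# from its extension setup); the name and fetch logic below are hypothetical.
def _prefetch_example(repo, revs, match):
    # fetch the files selected by 'match' at 'revs' from a remote store so
    # that the calling command finds them locally
    pass
# registration would be done by the extension itself:
#     fileprefetchhooks.add('myextension', _prefetch_example)
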
1612 def registersummarycallback(repo, otr, txnname=''):
1612 def registersummarycallback(repo, otr, txnname=''):
1613 """register a callback to issue a summary after the transaction is closed
1613 """register a callback to issue a summary after the transaction is closed
1614 """
1614 """
1615 def txmatch(sources):
1615 def txmatch(sources):
1616 return any(txnname.startswith(source) for source in sources)
1616 return any(txnname.startswith(source) for source in sources)
1617
1617
1618 categories = []
1618 categories = []
1619
1619
1620 def reportsummary(func):
1620 def reportsummary(func):
1621 """decorator for report callbacks."""
1621 """decorator for report callbacks."""
1622 # The repoview life cycle is shorter than the one of the actual
1622 # The repoview life cycle is shorter than the one of the actual
1623 # underlying repository. So the filtered object can die before the
1623 # underlying repository. So the filtered object can die before the
1624 # weakref is used leading to troubles. We keep a reference to the
1624 # weakref is used leading to troubles. We keep a reference to the
1625 # unfiltered object and restore the filtering when retrieving the
1625 # unfiltered object and restore the filtering when retrieving the
1626 # repository through the weakref.
1626 # repository through the weakref.
1627 filtername = repo.filtername
1627 filtername = repo.filtername
1628 reporef = weakref.ref(repo.unfiltered())
1628 reporef = weakref.ref(repo.unfiltered())
1629 def wrapped(tr):
1629 def wrapped(tr):
1630 repo = reporef()
1630 repo = reporef()
1631 if filtername:
1631 if filtername:
1632 repo = repo.filtered(filtername)
1632 repo = repo.filtered(filtername)
1633 func(repo, tr)
1633 func(repo, tr)
1634 newcat = '%02i-txnreport' % len(categories)
1634 newcat = '%02i-txnreport' % len(categories)
1635 otr.addpostclose(newcat, wrapped)
1635 otr.addpostclose(newcat, wrapped)
1636 categories.append(newcat)
1636 categories.append(newcat)
1637 return wrapped
1637 return wrapped
1638
1638
1639 if txmatch(_reportobsoletedsource):
1639 if txmatch(_reportobsoletedsource):
1640 @reportsummary
1640 @reportsummary
1641 def reportobsoleted(repo, tr):
1641 def reportobsoleted(repo, tr):
1642 obsoleted = obsutil.getobsoleted(repo, tr)
1642 obsoleted = obsutil.getobsoleted(repo, tr)
1643 if obsoleted:
1643 if obsoleted:
1644 repo.ui.status(_('obsoleted %i changesets\n')
1644 repo.ui.status(_('obsoleted %i changesets\n')
1645 % len(obsoleted))
1645 % len(obsoleted))
1646
1646
1647 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1647 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1648 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1648 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1649 instabilitytypes = [
1649 instabilitytypes = [
1650 ('orphan', 'orphan'),
1650 ('orphan', 'orphan'),
1651 ('phase-divergent', 'phasedivergent'),
1651 ('phase-divergent', 'phasedivergent'),
1652 ('content-divergent', 'contentdivergent'),
1652 ('content-divergent', 'contentdivergent'),
1653 ]
1653 ]
1654
1654
1655 def getinstabilitycounts(repo):
1655 def getinstabilitycounts(repo):
1656 filtered = repo.changelog.filteredrevs
1656 filtered = repo.changelog.filteredrevs
1657 counts = {}
1657 counts = {}
1658 for instability, revset in instabilitytypes:
1658 for instability, revset in instabilitytypes:
1659 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1659 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1660 filtered)
1660 filtered)
1661 return counts
1661 return counts
1662
1662
1663 oldinstabilitycounts = getinstabilitycounts(repo)
1663 oldinstabilitycounts = getinstabilitycounts(repo)
1664 @reportsummary
1664 @reportsummary
1665 def reportnewinstabilities(repo, tr):
1665 def reportnewinstabilities(repo, tr):
1666 newinstabilitycounts = getinstabilitycounts(repo)
1666 newinstabilitycounts = getinstabilitycounts(repo)
1667 for instability, revset in instabilitytypes:
1667 for instability, revset in instabilitytypes:
1668 delta = (newinstabilitycounts[instability] -
1668 delta = (newinstabilitycounts[instability] -
1669 oldinstabilitycounts[instability])
1669 oldinstabilitycounts[instability])
1670 msg = getinstabilitymessage(delta, instability)
1670 msg = getinstabilitymessage(delta, instability)
1671 if msg:
1671 if msg:
1672 repo.ui.warn(msg)
1672 repo.ui.warn(msg)
1673
1673
1674 if txmatch(_reportnewcssource):
1674 if txmatch(_reportnewcssource):
1675 @reportsummary
1675 @reportsummary
1676 def reportnewcs(repo, tr):
1676 def reportnewcs(repo, tr):
1677 """Report the range of new revisions pulled/unbundled."""
1677 """Report the range of new revisions pulled/unbundled."""
1678 origrepolen = tr.changes.get('origrepolen', len(repo))
1678 origrepolen = tr.changes.get('origrepolen', len(repo))
1679 unfi = repo.unfiltered()
1679 unfi = repo.unfiltered()
1680 if origrepolen >= len(unfi):
1680 if origrepolen >= len(unfi):
1681 return
1681 return
1682
1682
1683 # Compute the bounds of new visible revisions' range.
1683 # Compute the bounds of new visible revisions' range.
1684 revs = smartset.spanset(repo, start=origrepolen)
1684 revs = smartset.spanset(repo, start=origrepolen)
1685 if revs:
1685 if revs:
1686 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1686 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1687
1687
1688 if minrev == maxrev:
1688 if minrev == maxrev:
1689 revrange = minrev
1689 revrange = minrev
1690 else:
1690 else:
1691 revrange = '%s:%s' % (minrev, maxrev)
1691 revrange = '%s:%s' % (minrev, maxrev)
1692 draft = len(repo.revs('%ld and draft()', revs))
1692 draft = len(repo.revs('%ld and draft()', revs))
1693 secret = len(repo.revs('%ld and secret()', revs))
1693 secret = len(repo.revs('%ld and secret()', revs))
1694 if not (draft or secret):
1694 if not (draft or secret):
1695 msg = _('new changesets %s\n') % revrange
1695 msg = _('new changesets %s\n') % revrange
1696 elif draft and secret:
1696 elif draft and secret:
1697 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1697 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1698 msg %= (revrange, draft, secret)
1698 msg %= (revrange, draft, secret)
1699 elif draft:
1699 elif draft:
1700 msg = _('new changesets %s (%d drafts)\n')
1700 msg = _('new changesets %s (%d drafts)\n')
1701 msg %= (revrange, draft)
1701 msg %= (revrange, draft)
1702 elif secret:
1702 elif secret:
1703 msg = _('new changesets %s (%d secrets)\n')
1703 msg = _('new changesets %s (%d secrets)\n')
1704 msg %= (revrange, secret)
1704 msg %= (revrange, secret)
1705 else:
1705 else:
1706 errormsg = 'entered unreachable condition'
1706 errormsg = 'entered unreachable condition'
1707 raise error.ProgrammingError(errormsg)
1707 raise error.ProgrammingError(errormsg)
1708 repo.ui.status(msg)
1708 repo.ui.status(msg)
1709
1709
1710 # search new changesets directly pulled as obsolete
1710 # search new changesets directly pulled as obsolete
1711 duplicates = tr.changes.get('revduplicates', ())
1711 duplicates = tr.changes.get('revduplicates', ())
1712 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1712 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1713 origrepolen, duplicates)
1713 origrepolen, duplicates)
1714 cl = repo.changelog
1714 cl = repo.changelog
1715 extinctadded = [r for r in obsadded if r not in cl]
1715 extinctadded = [r for r in obsadded if r not in cl]
1716 if extinctadded:
1716 if extinctadded:
1717 # They are not just obsolete, but obsolete and invisible
1717 # They are not just obsolete, but obsolete and invisible
1718 # we call them "extinct" internally but the terms have not been
1718 # we call them "extinct" internally but the terms have not been
1719 # exposed to users.
1719 # exposed to users.
1720 msg = '(%d other changesets obsolete on arrival)\n'
1720 msg = '(%d other changesets obsolete on arrival)\n'
1721 repo.ui.status(msg % len(extinctadded))
1721 repo.ui.status(msg % len(extinctadded))
1722
1722
@reportsummary
def reportphasechanges(repo, tr):
    """Report statistics of phase changes for changesets pre-existing
    pull/unbundle.
    """
    origrepolen = tr.changes.get('origrepolen', len(repo))
    phasetracking = tr.changes.get('phases', {})
    if not phasetracking:
        return
    published = [
        rev for rev, (old, new) in phasetracking.iteritems()
        if new == phases.public and rev < origrepolen
    ]
    if not published:
        return
    repo.ui.status(_('%d local changesets published\n')
                   % len(published))

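# For instance, a pull that turns three pre-existing local drafts public
# would make the hook above report (count hypothetical):
#
#   3 local changesets published
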
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

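# A sketch of the expected behaviour; 'orphan' is one of the instability
# names used elsewhere in Mercurial, and the calls are hypothetical:
#
#   getinstabilitymessage(3, 'orphan')  # -> '3 new orphan changesets\n'
#   getinstabilitymessage(0, 'orphan')  # -> None, nothing to warn about
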
def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

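# Truncation sketch with six made-up nodes and the default maxnumnodes=4;
# in non-verbose mode the result would look like:
#
#   '4822c088a308 6c81ed0049f8 d2ae7f538514 02de42196ebe and 2 others'
#
# With ui.verbose set, all six short hashes are joined instead.
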
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches only
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

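# When a transaction would leave two heads on branch "default", the abort
# raised above renders along these lines (hashes hypothetical):
#
#   abort: rejecting multiple heads on branch "default"
#   (2 heads: 4822c088a308 6c81ed0049f8)
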
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

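# A minimal sketch of an extension using this hook via the standard
# extensions.wrapfunction() mechanism; `mysink` is a hypothetical wrapper
# class provided by the extension:
#
#   from mercurial import extensions, scmutil
#
#   def _wrapsink(orig, sink):
#       return mysink(orig(sink))  # decorate the sink with extra behaviour
#
#   def extsetup(ui):
#       extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)
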
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate the branch/tags caches
    # until we can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

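# Illustrative use (the hash prefix is made up): with
# experimental.directaccess enabled, a caller resolving user-supplied
# revspecs can do
#
#   repo = unhidehashlikerevs(repo, ['4822c088a308'], 'warn')
#
# and, if the prefix names a hidden changeset, continue working against a
# 'visible-hidden' filtered repo after the warning above is emitted.
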
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

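# For example, given symbols {'41', '4822c0'} (values hypothetical): '41' is
# treated as a revision number only when experimental.directaccess.revnums
# is enabled, while '4822c0' is resolved as a node id prefix; a revision is
# collected only if it exists in the unfiltered changelog but not in the
# visible one.
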
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
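
# Reading of the revset above, with a hypothetical bookmark name: take
# everything reachable from the bookmark, then subtract history reachable
# from non-bookmarked heads and from other bookmarks, leaving only the
# changesets "owned" by the given bookmark:
#
#   revs = bookmarkrevs(repo, 'feature')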