addremove: use uipathfn instead of m.rel() for recorded similarity message...
Martin von Zweigbergk
r41811:0a5a6675 default
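Note: the context lines below are unchanged; the edit itself lands further down in the file, beyond this excerpt, in addremove's similarity reporting. Per the commit message, the idea is to render paths through the function returned by getuipathfn() (defined in this file) rather than through the matcher's m.rel(), so the message honors ui.relative-paths. A minimal hedged sketch of that idea; old, new, score and relative are illustrative names only, not taken from the diff:

    # before: always cwd-relative, via the matcher
    repo.ui.status(_('recording removal of %s as rename to %s (%d%% similar)\n')
                   % (m.rel(old), m.rel(new), score * 100))
    # after: rendered by the ui path function, honoring ui.relative-paths
    uipathfn = getuipathfn(repo, legacyrelativevalue=relative)
    repo.ui.status(_('recording removal of %s as rename to %s (%d%% similar)\n')
                   % (uipathfn(old), uipathfn(new), score * 100))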
@@ -1,1855 +1,1859 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import posixpath
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code

    return -1

def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
            repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

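# A hedged usage sketch (illustrative, not part of this module): templates
# reach the function above through {shortest(node)}; from code one might write
#
#     prefix = shortesthexnodeidprefix(repo, ctx.node(), minlength=4)
#
# and get back e.g. a four-digit hex prefix if four digits are unambiguous
# in the repo, or a longer one otherwise.
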
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    if (first == second and len(revs) >= 2
            and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

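# A hedged usage sketch (illustrative, not part of this module): as the
# docstring above says, callers pre-format any arguments with
# revsetlang.formatspec() and pass the result as one element of ``specs``:
#
#     spec = revsetlang.formatspec('%d::%d', baserev, tiprev)
#     for rev in revrange(repo, [spec, 'head()']):
#         ...
#
# where baserev and tiprev are integer revisions supplied by the caller.
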
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
730 """Return a function that produced paths for presenting to the user.
730 """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    else:
        return lambda f: f

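# A hedged usage sketch (illustrative, not part of this module): a command
# that prints file names would typically build the function once and reuse it:
#
#     uipathfn = getuipathfn(repo, legacyrelativevalue=True)
#     for f in sorted(files):
#         repo.ui.status(_('adding %s\n') % uipathfn(f))
#
# When built with relative paths enabled it returns cwd-relative paths (via
# repo.pathto); otherwise it returns the repo-relative name unchanged. This
# is the function the commit message says addremove should use in place of
# m.rel() for its similarity message.
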
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))

def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    return bool(pats or opts.get('include') or opts.get('exclude'))

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)

class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

910 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
910 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
911 have replacements. operation is a string, like "rebase".
911 have replacements. operation is a string, like "rebase".
912
912
913 metadata is dictionary containing metadata to be stored in obsmarker if
913 metadata is dictionary containing metadata to be stored in obsmarker if
914 obsolescence is enabled.
914 obsolescence is enabled.
915 """
915 """
916 assert fixphase or targetphase is None
916 assert fixphase or targetphase is None
917 if not replacements and not moves:
917 if not replacements and not moves:
918 return
918 return
919
919
920 # translate mapping's other forms
920 # translate mapping's other forms
921 if not util.safehasattr(replacements, 'items'):
921 if not util.safehasattr(replacements, 'items'):
922 replacements = {(n,): () for n in replacements}
922 replacements = {(n,): () for n in replacements}
923 else:
923 else:
924 # upgrading non tuple "source" to tuple ones for BC
924 # upgrading non tuple "source" to tuple ones for BC
925 repls = {}
925 repls = {}
926 for key, value in replacements.items():
926 for key, value in replacements.items():
927 if not isinstance(key, tuple):
927 if not isinstance(key, tuple):
928 key = (key,)
928 key = (key,)
929 repls[key] = value
929 repls[key] = value
930 replacements = repls
930 replacements = repls
931
931
932 # Unfiltered repo is needed since nodes in replacements might be hidden.
932 # Unfiltered repo is needed since nodes in replacements might be hidden.
933 unfi = repo.unfiltered()
933 unfi = repo.unfiltered()
934
934
935 # Calculate bookmark movements
935 # Calculate bookmark movements
936 if moves is None:
936 if moves is None:
937 moves = {}
937 moves = {}
938 for oldnodes, newnodes in replacements.items():
938 for oldnodes, newnodes in replacements.items():
939 for oldnode in oldnodes:
939 for oldnode in oldnodes:
940 if oldnode in moves:
940 if oldnode in moves:
941 continue
941 continue
942 if len(newnodes) > 1:
942 if len(newnodes) > 1:
943 # usually a split, take the one with biggest rev number
943 # usually a split, take the one with biggest rev number
944 newnode = next(unfi.set('max(%ln)', newnodes)).node()
944 newnode = next(unfi.set('max(%ln)', newnodes)).node()
945 elif len(newnodes) == 0:
945 elif len(newnodes) == 0:
946 # move bookmark backwards
946 # move bookmark backwards
947 allreplaced = []
947 allreplaced = []
948 for rep in replacements:
948 for rep in replacements:
949 allreplaced.extend(rep)
949 allreplaced.extend(rep)
950 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
950 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
951 allreplaced))
951 allreplaced))
952 if roots:
952 if roots:
953 newnode = roots[0].node()
953 newnode = roots[0].node()
954 else:
954 else:
955 newnode = nullid
955 newnode = nullid
956 else:
956 else:
957 newnode = newnodes[0]
957 newnode = newnodes[0]
958 moves[oldnode] = newnode
958 moves[oldnode] = newnode
959
959
960 allnewnodes = [n for ns in replacements.values() for n in ns]
960 allnewnodes = [n for ns in replacements.values() for n in ns]
961 toretract = {}
961 toretract = {}
962 toadvance = {}
962 toadvance = {}
963 if fixphase:
963 if fixphase:
964 precursors = {}
964 precursors = {}
965 for oldnodes, newnodes in replacements.items():
965 for oldnodes, newnodes in replacements.items():
966 for oldnode in oldnodes:
966 for oldnode in oldnodes:
967 for newnode in newnodes:
967 for newnode in newnodes:
968 precursors.setdefault(newnode, []).append(oldnode)
968 precursors.setdefault(newnode, []).append(oldnode)
969
969
970 allnewnodes.sort(key=lambda n: unfi[n].rev())
970 allnewnodes.sort(key=lambda n: unfi[n].rev())
971 newphases = {}
971 newphases = {}
972 def phase(ctx):
972 def phase(ctx):
973 return newphases.get(ctx.node(), ctx.phase())
973 return newphases.get(ctx.node(), ctx.phase())
974 for newnode in allnewnodes:
974 for newnode in allnewnodes:
975 ctx = unfi[newnode]
975 ctx = unfi[newnode]
976 parentphase = max(phase(p) for p in ctx.parents())
976 parentphase = max(phase(p) for p in ctx.parents())
977 if targetphase is None:
977 if targetphase is None:
978 oldphase = max(unfi[oldnode].phase()
978 oldphase = max(unfi[oldnode].phase()
979 for oldnode in precursors[newnode])
979 for oldnode in precursors[newnode])
980 newphase = max(oldphase, parentphase)
980 newphase = max(oldphase, parentphase)
981 else:
981 else:
982 newphase = max(targetphase, parentphase)
982 newphase = max(targetphase, parentphase)
983 newphases[newnode] = newphase
983 newphases[newnode] = newphase
984 if newphase > ctx.phase():
984 if newphase > ctx.phase():
985 toretract.setdefault(newphase, []).append(newnode)
985 toretract.setdefault(newphase, []).append(newnode)
986 elif newphase < ctx.phase():
986 elif newphase < ctx.phase():
987 toadvance.setdefault(newphase, []).append(newnode)
987 toadvance.setdefault(newphase, []).append(newnode)
988
988
989 with repo.transaction('cleanup') as tr:
989 with repo.transaction('cleanup') as tr:
990 # Move bookmarks
990 # Move bookmarks
991 bmarks = repo._bookmarks
991 bmarks = repo._bookmarks
992 bmarkchanges = []
992 bmarkchanges = []
993 for oldnode, newnode in moves.items():
993 for oldnode, newnode in moves.items():
994 oldbmarks = repo.nodebookmarks(oldnode)
994 oldbmarks = repo.nodebookmarks(oldnode)
995 if not oldbmarks:
995 if not oldbmarks:
996 continue
996 continue
997 from . import bookmarks # avoid import cycle
997 from . import bookmarks # avoid import cycle
998 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
998 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
999 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
999 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1000 hex(oldnode), hex(newnode)))
1000 hex(oldnode), hex(newnode)))
1001 # Delete divergent bookmarks being parents of related newnodes
1001 # Delete divergent bookmarks being parents of related newnodes
1002 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1002 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1003 allnewnodes, newnode, oldnode)
1003 allnewnodes, newnode, oldnode)
1004 deletenodes = _containsnode(repo, deleterevs)
1004 deletenodes = _containsnode(repo, deleterevs)
1005 for name in oldbmarks:
1005 for name in oldbmarks:
1006 bmarkchanges.append((name, newnode))
1006 bmarkchanges.append((name, newnode))
1007 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1007 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1008 bmarkchanges.append((b, None))
1008 bmarkchanges.append((b, None))
1009
1009
1010 if bmarkchanges:
1010 if bmarkchanges:
1011 bmarks.applychanges(repo, tr, bmarkchanges)
1011 bmarks.applychanges(repo, tr, bmarkchanges)
1012
1012
1013 for phase, nodes in toretract.items():
1013 for phase, nodes in toretract.items():
1014 phases.retractboundary(repo, tr, phase, nodes)
1014 phases.retractboundary(repo, tr, phase, nodes)
1015 for phase, nodes in toadvance.items():
1015 for phase, nodes in toadvance.items():
1016 phases.advanceboundary(repo, tr, phase, nodes)
1016 phases.advanceboundary(repo, tr, phase, nodes)
1017
1017
1018 # Obsolete or strip nodes
1018 # Obsolete or strip nodes
1019 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1019 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1020 # If a node is already obsoleted, and we want to obsolete it
1020 # If a node is already obsoleted, and we want to obsolete it
1021 # without a successor, skip that obssolete request since it's
1021 # without a successor, skip that obssolete request since it's
1022 # unnecessary. That's the "if s or not isobs(n)" check below.
1022 # unnecessary. That's the "if s or not isobs(n)" check below.
1023 # Also sort the node in topology order, that might be useful for
1023 # Also sort the node in topology order, that might be useful for
1024 # some obsstore logic.
1024 # some obsstore logic.
1025 # NOTE: the sorting might belong to createmarkers.
1025 # NOTE: the sorting might belong to createmarkers.
1026 torev = unfi.changelog.rev
1026 torev = unfi.changelog.rev
1027 sortfunc = lambda ns: torev(ns[0][0])
1027 sortfunc = lambda ns: torev(ns[0][0])
1028 rels = []
1028 rels = []
1029 for ns, s in sorted(replacements.items(), key=sortfunc):
1029 for ns, s in sorted(replacements.items(), key=sortfunc):
1030 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1030 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1031 rels.append(rel)
1031 rels.append(rel)
1032 if rels:
1032 if rels:
1033 obsolete.createmarkers(repo, rels, operation=operation,
1033 obsolete.createmarkers(repo, rels, operation=operation,
1034 metadata=metadata)
1034 metadata=metadata)
1035 else:
1035 else:
1036 from . import repair # avoid import cycle
1036 from . import repair # avoid import cycle
1037 tostrip = list(n for ns in replacements for n in ns)
1037 tostrip = list(n for ns in replacements for n in ns)
1038 if tostrip:
1038 if tostrip:
1039 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1039 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1040 backup=backup)
1040 backup=backup)
1041
1041
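# Illustrative sketch (editor's addition, not part of scmutil.py): the shapes
# 'replacements' can take, per the cleanupnodes docstring above. The node
# variables are placeholders for real binary nodeids.
#
#     cleanupnodes(repo, {oldnode: [newnode]}, 'rebase')   # simple rewrite
#     cleanupnodes(repo, {(a, b): (succ,)}, 'fold')        # fold two into one
#     cleanupnodes(repo, [oldnode], 'prune')               # no successors
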
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % uipathfn(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % uipathfn(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % uipathfn(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
-                          similarity)
+                          similarity, uipathfn)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

+    # TODO: We should probably have the caller pass in uipathfn and apply it to
+    # the messages above too. forcerelativevalue=True is consistent with how
+    # it used to work.
+    uipathfn = getuipathfn(repo, forcerelativevalue=True)
    renames = _findrenames(repo, m, added + unknown, removed + deleted,
-                          similarity)
+                          similarity, uipathfn)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

-def _findrenames(repo, matcher, added, removed, similarity):
+def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
-                              (matcher.rel(old), matcher.rel(new),
+                              (uipathfn(old), uipathfn(new),
                                score * 100))
            renames[new] = old
    return renames

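# Illustrative sketch (editor's addition, not part of scmutil.py): the point
# of the change above. 'uipathfn' is now supplied by the caller (see the
# getuipathfn(repo, forcerelativevalue=True) call in marktouched), so the
# rename message follows the caller's path-display policy rather than the
# matcher's cwd-relative m.rel(). The file names and score are made up.
#
#     uipathfn = getuipathfn(repo, forcerelativevalue=True)
#     renames = _findrenames(repo, m, added, removed, 0.75, uipathfn)
#     # prints e.g.: recording removal of a.txt as rename to b.txt (80% similar)
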
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned directly from the
    instance dictionary, where the first access stored it.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified the file between the time we read and stat it
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

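# Illustrative sketch (editor's addition, not part of scmutil.py): the usual
# way filecache is consumed, modeled loosely on localrepo's repofilecache.
# The class names and the parser helper are hypothetical; only join() has to
# be provided by the subclass.
#
#     class repofilecache(filecache):
#         def join(self, obj, fname):
#             return obj.vfs.join(fname)
#
#     class somerepo(object):
#         def __init__(self, vfs):
#             self.vfs = vfs
#             self._filecache = {}
#
#         @repofilecache('bookmarks')
#         def _bookmarks(self):
#             return parsebookmarksfile(self)  # hypothetical parser
#
# The first access to somerepo._bookmarks stats .hg/bookmarks and caches the
# parsed result; later accesses hit the instance dictionary directly.
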
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

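# Illustrative sketch (editor's addition, not part of scmutil.py): an hgrc
# entry that extdatasource() would consume. The source name, file name, and
# output are made up; shell: commands run with the repo root as cwd, and each
# output line is "<revspec> <value>".
#
#     [extdata]
#     reviewstate = shell:cat .hg/reviewstate
#
#     extdatasource(repo, 'reviewstate')   # -> {rev: value, ...}
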
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        unit = ''  # default, so the format below works with no unit configured
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

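# Illustrative sketch (editor's addition, not part of scmutil.py): progress
# objects are normally obtained from ui.makeprogress() rather than constructed
# directly, and the context-manager protocol calls complete() on exit. The
# topic and 'files' iterable are made up.
#
#     with repo.ui.makeprogress(_('scanning'), unit=_('files'),
#                               total=len(files)) as prog:
#         for f in files:
#             prog.increment(item=f)
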
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as-is, not in key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

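# Illustrative sketch (editor's addition, not part of scmutil.py): a
# round-trip through simplekeyvaluefile, assuming 'repo' is a loaded
# repository; the state-file name and keys are made up.
#
#     skv = simplekeyvaluefile(repo.vfs, 'examplestate')
#     skv.write({'version': '1', 'inprogress': 'yes'}, firstline='v1')
#     state = skv.read(firstlinenonkeyval=True)
#     # state == {'__firstline': 'v1', 'version': '1', 'inprogress': 'yes'}
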
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

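# Illustrative sketch (editor's addition, not part of scmutil.py): how an
# extension that stores file contents remotely would plug into the prefetch
# hook. The extension name and helper are hypothetical; util.hooks exposes
# add(source, hook).
#
#     def _myprefetch(repo, revs, match):
#         ...  # fetch the matched files for the given revs
#
#     fileprefetchhooks.add('myextension', _myprefetch)
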
1604 # A marker that tells the evolve extension to suppress its own reporting
1608 # A marker that tells the evolve extension to suppress its own reporting
1605 _reportstroubledchangesets = True
1609 _reportstroubledchangesets = True
1606
1610
1607 def registersummarycallback(repo, otr, txnname=''):
1611 def registersummarycallback(repo, otr, txnname=''):
1608 """register a callback to issue a summary after the transaction is closed
1612 """register a callback to issue a summary after the transaction is closed
1609 """
1613 """
1610 def txmatch(sources):
1614 def txmatch(sources):
1611 return any(txnname.startswith(source) for source in sources)
1615 return any(txnname.startswith(source) for source in sources)
1612
1616
1613 categories = []
1617 categories = []
1614
1618
1615 def reportsummary(func):
1619 def reportsummary(func):
1616 """decorator for report callbacks."""
1620 """decorator for report callbacks."""
1617 # The repoview life cycle is shorter than the one of the actual
1621 # The repoview life cycle is shorter than the one of the actual
1618 # underlying repository. So the filtered object can die before the
1622 # underlying repository. So the filtered object can die before the
1619 # weakref is used leading to troubles. We keep a reference to the
1623 # weakref is used leading to troubles. We keep a reference to the
1620 # unfiltered object and restore the filtering when retrieving the
1624 # unfiltered object and restore the filtering when retrieving the
1621 # repository through the weakref.
1625 # repository through the weakref.
1622 filtername = repo.filtername
1626 filtername = repo.filtername
1623 reporef = weakref.ref(repo.unfiltered())
1627 reporef = weakref.ref(repo.unfiltered())
1624 def wrapped(tr):
1628 def wrapped(tr):
1625 repo = reporef()
1629 repo = reporef()
1626 if filtername:
1630 if filtername:
1627 repo = repo.filtered(filtername)
1631 repo = repo.filtered(filtername)
1628 func(repo, tr)
1632 func(repo, tr)
1629 newcat = '%02i-txnreport' % len(categories)
1633 newcat = '%02i-txnreport' % len(categories)
1630 otr.addpostclose(newcat, wrapped)
1634 otr.addpostclose(newcat, wrapped)
1631 categories.append(newcat)
1635 categories.append(newcat)
1632 return wrapped
1636 return wrapped
1633
1637
1634 if txmatch(_reportobsoletedsource):
1638 if txmatch(_reportobsoletedsource):
1635 @reportsummary
1639 @reportsummary
1636 def reportobsoleted(repo, tr):
1640 def reportobsoleted(repo, tr):
1637 obsoleted = obsutil.getobsoleted(repo, tr)
1641 obsoleted = obsutil.getobsoleted(repo, tr)
1638 if obsoleted:
1642 if obsoleted:
1639 repo.ui.status(_('obsoleted %i changesets\n')
1643 repo.ui.status(_('obsoleted %i changesets\n')
1640 % len(obsoleted))
1644 % len(obsoleted))
1641
1645
1642 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1646 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1643 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1647 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1644 instabilitytypes = [
1648 instabilitytypes = [
1645 ('orphan', 'orphan'),
1649 ('orphan', 'orphan'),
1646 ('phase-divergent', 'phasedivergent'),
1650 ('phase-divergent', 'phasedivergent'),
1647 ('content-divergent', 'contentdivergent'),
1651 ('content-divergent', 'contentdivergent'),
1648 ]
1652 ]
1649
1653
1650 def getinstabilitycounts(repo):
1654 def getinstabilitycounts(repo):
1651 filtered = repo.changelog.filteredrevs
1655 filtered = repo.changelog.filteredrevs
1652 counts = {}
1656 counts = {}
1653 for instability, revset in instabilitytypes:
1657 for instability, revset in instabilitytypes:
1654 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1658 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1655 filtered)
1659 filtered)
1656 return counts
1660 return counts
1657
1661
1658 oldinstabilitycounts = getinstabilitycounts(repo)
1662 oldinstabilitycounts = getinstabilitycounts(repo)
1659 @reportsummary
1663 @reportsummary
1660 def reportnewinstabilities(repo, tr):
1664 def reportnewinstabilities(repo, tr):
1661 newinstabilitycounts = getinstabilitycounts(repo)
1665 newinstabilitycounts = getinstabilitycounts(repo)
1662 for instability, revset in instabilitytypes:
1666 for instability, revset in instabilitytypes:
1663 delta = (newinstabilitycounts[instability] -
1667 delta = (newinstabilitycounts[instability] -
1664 oldinstabilitycounts[instability])
1668 oldinstabilitycounts[instability])
1665 msg = getinstabilitymessage(delta, instability)
1669 msg = getinstabilitymessage(delta, instability)
1666 if msg:
1670 if msg:
1667 repo.ui.warn(msg)
1671 repo.ui.warn(msg)
1668
1672
1669 if txmatch(_reportnewcssource):
1673 if txmatch(_reportnewcssource):
1670 @reportsummary
1674 @reportsummary
1671 def reportnewcs(repo, tr):
1675 def reportnewcs(repo, tr):
1672 """Report the range of new revisions pulled/unbundled."""
1676 """Report the range of new revisions pulled/unbundled."""
1673 origrepolen = tr.changes.get('origrepolen', len(repo))
1677 origrepolen = tr.changes.get('origrepolen', len(repo))
1674 unfi = repo.unfiltered()
1678 unfi = repo.unfiltered()
1675 if origrepolen >= len(unfi):
1679 if origrepolen >= len(unfi):
1676 return
1680 return
1677
1681
1678 # Compute the bounds of new visible revisions' range.
1682 # Compute the bounds of new visible revisions' range.
1679 revs = smartset.spanset(repo, start=origrepolen)
1683 revs = smartset.spanset(repo, start=origrepolen)
1680 if revs:
1684 if revs:
1681 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1685 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1682
1686
1683 if minrev == maxrev:
1687 if minrev == maxrev:
1684 revrange = minrev
1688 revrange = minrev
1685 else:
1689 else:
1686 revrange = '%s:%s' % (minrev, maxrev)
1690 revrange = '%s:%s' % (minrev, maxrev)
1687 draft = len(repo.revs('%ld and draft()', revs))
1691 draft = len(repo.revs('%ld and draft()', revs))
1688 secret = len(repo.revs('%ld and secret()', revs))
1692 secret = len(repo.revs('%ld and secret()', revs))
1689 if not (draft or secret):
1693 if not (draft or secret):
1690 msg = _('new changesets %s\n') % revrange
1694 msg = _('new changesets %s\n') % revrange
1691 elif draft and secret:
1695 elif draft and secret:
1692 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1696 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1693 msg %= (revrange, draft, secret)
1697 msg %= (revrange, draft, secret)
1694 elif draft:
1698 elif draft:
1695 msg = _('new changesets %s (%d drafts)\n')
1699 msg = _('new changesets %s (%d drafts)\n')
1696 msg %= (revrange, draft)
1700 msg %= (revrange, draft)
1697 elif secret:
1701 elif secret:
1698 msg = _('new changesets %s (%d secrets)\n')
1702 msg = _('new changesets %s (%d secrets)\n')
1699 msg %= (revrange, secret)
1703 msg %= (revrange, secret)
1700 else:
1704 else:
1701 errormsg = 'entered unreachable condition'
1705 errormsg = 'entered unreachable condition'
1702 raise error.ProgrammingError(errormsg)
1706 raise error.ProgrammingError(errormsg)
1703 repo.ui.status(msg)
1707 repo.ui.status(msg)
1704
1708
1705 # search new changesets directly pulled as obsolete
1709 # search new changesets directly pulled as obsolete
1706 duplicates = tr.changes.get('revduplicates', ())
1710 duplicates = tr.changes.get('revduplicates', ())
1707 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1711 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1708 origrepolen, duplicates)
1712 origrepolen, duplicates)
1709 cl = repo.changelog
1713 cl = repo.changelog
1710 extinctadded = [r for r in obsadded if r not in cl]
1714 extinctadded = [r for r in obsadded if r not in cl]
1711 if extinctadded:
1715 if extinctadded:
1712 # They are not just obsolete, but obsolete and invisible
1716 # They are not just obsolete, but obsolete and invisible
1713 # we call them "extinct" internally but the terms have not been
1717 # we call them "extinct" internally but the terms have not been
1714 # exposed to users.
1718 # exposed to users.
1715 msg = '(%d other changesets obsolete on arrival)\n'
1719 msg = '(%d other changesets obsolete on arrival)\n'
1716 repo.ui.status(msg % len(extinctadded))
1720 repo.ui.status(msg % len(extinctadded))

    @reportsummary
    def reportphasechanges(repo, tr):
        """Report statistics of phase changes for changesets pre-existing
        pull/unbundle.
        """
        origrepolen = tr.changes.get('origrepolen', len(repo))
        phasetracking = tr.changes.get('phases', {})
        if not phasetracking:
            return
        published = [
            rev for rev, (old, new) in phasetracking.iteritems()
            if new == phases.public and rev < origrepolen
        ]
        if not published:
            return
        repo.ui.status(_('%d local changesets published\n')
                       % len(published))
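
    # Sketch of the data this callback consumes (hypothetical contents):
    # tr.changes['phases'] maps revision numbers to (oldphase, newphase)
    # pairs, e.g. {3: (phases.draft, phases.public)}; only pre-existing
    # revisions (rev < origrepolen) that became public are counted.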

def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)
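
# Usage sketch (hypothetical values): getinstabilitymessage(2, 'orphan')
# returns '2 new orphan changesets\n', while a non-positive delta falls
# through and returns None, letting callers skip the warning entirely.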

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
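
# Behavior sketch (hypothetical input): with six nodes, a non-verbose ui and
# the default maxnumnodes=4, this returns something like
# '<h1> <h2> <h3> <h4> and 2 others', where each <hN> is a short hash.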

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
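
# Failure sketch (hypothetical repository state): with two visible heads on
# branch "default", the transaction is aborted with
#   rejecting multiple heads on branch "default"
#   (2 heads: <h1> <h2>)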

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally
    loaded.
    """
    return sink
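
# Extension-side sketch (hypothetical extension code, not part of this
# module) of how this hook point can be wrapped:
#
#   from mercurial import extensions, scmutil
#
#   def _wrapsink(orig, sink):
#       sink = orig(sink)
#       # decorate or replace the sink here before returning it
#       return sink
#
#   def extsetup(ui):
#       extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)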

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision
    number is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
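
# Behavior sketch (hypothetical session): with
#   [experimental]
#   directaccess = True
# a hash-like spec naming a hidden changeset is pinned into a temporary
# 'visible-hidden' repo view; when hiddentype is 'warn' the user also sees
#   warning: accessing hidden changesets for write operation: <rev>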

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
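
# Input/output sketch (hypothetical data): given symbols such as
# {'42', 'f9e2c1ab'}, numeric entries are honored only when
# 'experimental.directaccess.revnums' is enabled, hex prefixes are resolved
# against the unfiltered changelog, and only revisions hidden from the
# current view end up in the returned set.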

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
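
# Expansion sketch (hypothetical bookmark 'foo'): a call like
# bookmarkrevs(repo, 'foo') evaluates the revset
#   ancestors(bookmark("foo"))
#     - ancestors(head() and not bookmark("foo"))
#     - ancestors(bookmark() and not bookmark("foo"))
# i.e. the changesets reachable only through the 'foo' bookmark.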