shortest: never emit 0-length prefix even if unique...
Martin von Zweigbergk
r40439:bf249bb6 default
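
The whole 1804 → 1806 change is the pair of lines marked '+' in shortesthexnodeidprefix() below: minlength is clamped with max(minlength, 1), so shortest() can no longer return a 0-length prefix when a caller asks for minlength=0, even where the empty prefix would technically be unique. A minimal standalone sketch of the clamped behavior (hypothetical helper, not part of scmutil.py):

    def clamped_shortest(hexnode, othernodes, minlength=0):
        # mirror the commit's fix: a requested minimum of 0 must still
        # yield at least one hex digit, never the empty string
        minlength = max(minlength, 1)
        for length in range(minlength, len(hexnode) + 1):
            prefix = hexnode[:length]
            if not any(other.startswith(prefix) for other in othernodes):
                return prefix
        return hexnode

    # With no other nodes, every prefix is "unique", including the empty
    # one; the clamp still forces at least one digit:
    assert clamped_shortest('bf249bb6', []) == 'b'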
@@ -1,1804 +1,1806 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
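
callcatch() is the wrapper a command dispatcher would put around its entry point so that every exception above becomes a message plus an exit code; a hedged usage sketch (the _dispatch entry point is hypothetical):

    def runcommand(ui, args):
        # run the real command under the global exception handler;
        # returns its result, or -1/1/inst.code for the errors above
        return callcatch(ui, lambda: _dispatch(ui, args))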

def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
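
A sketch of how the auditor above is meant to be driven (hypothetical call site; abort=False means a collision only warns):

    audit = casecollisionauditor(ui, False, repo.dirstate)
    for f in (b'README', b'readme'):  # same name after case folding
        audit(f)                      # warns (or aborts) on the second call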

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
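
Per the docstring above, callers would mix this digest into a cache key next to the tip data; a hedged sketch (the key layout is hypothetical):

    # tiprev/tipnode alone cannot detect a changed repoview, so the
    # filtered-revision digest (possibly None) is added to the key
    tiprev = len(repo.changelog) - 1
    cachekey = (tiprev, repo.changelog.tip(), filteredhash(repo, tiprev))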

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False
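
Concretely, a prefix is revnum-like only when it parses as an integer without a leading zero (except '0' itself) and does not exceed the tip; illustrative cases for a hypothetical 100-revision repo (len(repo) == 100, valid revnums 0..99):

    mayberevnum(repo, b'42')   # True:  could be revision 42
    mayberevnum(repo, b'0')    # True:  '0' is a valid revnum
    mayberevnum(repo, b'042')  # False: leading zero is never a revnum
    mayberevnum(repo, b'123')  # False: 123 >= len(repo)
    mayberevnum(repo, b'ff')   # False: not an integer at all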

def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

+    minlength = max(minlength, 1)
+
    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
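
The cache parameter exists so a batch caller (say, a templater formatting many nodes) can reuse the disambiguation revset and nodetree computed above; a hedged usage sketch (nodes is a hypothetical sequence of binary node ids):

    cache = {}  # holds 'disambiguationrevset' / 'disambiguationnodetree'
    for node in nodes:
        prefix = shortesthexnodeidprefix(repo, node, minlength=1, cache=cache)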

def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
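
As the docstring prescribes, arguments that need interpolation go through revsetlang.formatspec() before reaching revrange(); a hedged sketch (baserev is a hypothetical integer revision):

    spec = revsetlang.formatspec('%d::', baserev)
    for rev in revrange(repo, [spec, 'tip']):
        pass  # rev is an integer revision from the chained OR of both specs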
725
727
726 def meaningfulparents(repo, ctx):
728 def meaningfulparents(repo, ctx):
727 """Return list of meaningful (or all if debug) parentrevs for rev.
729 """Return list of meaningful (or all if debug) parentrevs for rev.
728
730
729 For merges (two non-nullrev revisions) both parents are meaningful.
731 For merges (two non-nullrev revisions) both parents are meaningful.
730 Otherwise the first parent revision is considered meaningful if it
732 Otherwise the first parent revision is considered meaningful if it
731 is not the preceding revision.
733 is not the preceding revision.
732 """
734 """
733 parents = ctx.parents()
735 parents = ctx.parents()
734 if len(parents) > 1:
736 if len(parents) > 1:
735 return parents
737 return parents
736 if repo.ui.debugflag:
738 if repo.ui.debugflag:
737 return [parents[0], repo[nullrev]]
739 return [parents[0], repo[nullrev]]
738 if parents[0].rev() >= intrev(ctx) - 1:
740 if parents[0].rev() >= intrev(ctx) - 1:
739 return []
741 return []
740 return parents
742 return parents
741
743
742 def expandpats(pats):
744 def expandpats(pats):
743 '''Expand bare globs when running on windows.
745 '''Expand bare globs when running on windows.
744 On posix we assume it already has already been done by sh.'''
746 On posix we assume it already has already been done by sh.'''
745 if not util.expandglobs:
747 if not util.expandglobs:
746 return list(pats)
748 return list(pats)
747 ret = []
749 ret = []
748 for kindpat in pats:
750 for kindpat in pats:
749 kind, pat = matchmod._patsplit(kindpat, None)
751 kind, pat = matchmod._patsplit(kindpat, None)
750 if kind is None:
752 if kind is None:
751 try:
753 try:
752 globbed = glob.glob(pat)
754 globbed = glob.glob(pat)
753 except re.error:
755 except re.error:
754 globbed = [pat]
756 globbed = [pat]
755 if globbed:
757 if globbed:
756 ret.extend(globbed)
758 ret.extend(globbed)
757 continue
759 continue
758 ret.append(kindpat)
760 ret.append(kindpat)
759 return ret
761 return ret
760
762
761 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
763 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
762 badfn=None):
764 badfn=None):
763 '''Return a matcher and the patterns that were used.
765 '''Return a matcher and the patterns that were used.
764 The matcher will warn about bad matches, unless an alternate badfn callback
766 The matcher will warn about bad matches, unless an alternate badfn callback
765 is provided.'''
767 is provided.'''
766 if pats == ("",):
768 if pats == ("",):
767 pats = []
769 pats = []
768 if opts is None:
770 if opts is None:
769 opts = {}
771 opts = {}
770 if not globbed and default == 'relpath':
772 if not globbed and default == 'relpath':
771 pats = expandpats(pats or [])
773 pats = expandpats(pats or [])
772
774
773 def bad(f, msg):
775 def bad(f, msg):
774 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
776 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
775
777
776 if badfn is None:
778 if badfn is None:
777 badfn = bad
779 badfn = bad
778
780
779 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
781 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
780 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
782 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
781
783
782 if m.always():
784 if m.always():
783 pats = []
785 pats = []
784 return m, pats
786 return m, pats
785
787
786 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
788 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
787 badfn=None):
789 badfn=None):
788 '''Return a matcher that will warn about bad matches.'''
790 '''Return a matcher that will warn about bad matches.'''
789 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
791 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
790
792
791 def matchall(repo):
793 def matchall(repo):
792 '''Return a matcher that will efficiently match everything.'''
794 '''Return a matcher that will efficiently match everything.'''
793 return matchmod.always(repo.root, repo.getcwd())
795 return matchmod.always(repo.root, repo.getcwd())
794
796
795 def matchfiles(repo, files, badfn=None):
797 def matchfiles(repo, files, badfn=None):
796 '''Return a matcher that will efficiently match exactly these files.'''
798 '''Return a matcher that will efficiently match exactly these files.'''
797 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
799 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
798
800
799 def parsefollowlinespattern(repo, rev, pat, msg):
801 def parsefollowlinespattern(repo, rev, pat, msg):
800 """Return a file name from `pat` pattern suitable for usage in followlines
802 """Return a file name from `pat` pattern suitable for usage in followlines
801 logic.
803 logic.
802 """
804 """
803 if not matchmod.patkind(pat):
805 if not matchmod.patkind(pat):
804 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
806 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
805 else:
807 else:
806 ctx = repo[rev]
808 ctx = repo[rev]
807 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
809 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
808 files = [f for f in ctx if m(f)]
810 files = [f for f in ctx if m(f)]
809 if len(files) != 1:
811 if len(files) != 1:
810 raise error.ParseError(msg)
812 raise error.ParseError(msg)
811 return files[0]
813 return files[0]
812
814
813 def origpath(ui, repo, filepath):
815 def origpath(ui, repo, filepath):
814 '''customize where .orig files are created
816 '''customize where .orig files are created
815
817
816 Fetch user defined path from config file: [ui] origbackuppath = <path>
818 Fetch user defined path from config file: [ui] origbackuppath = <path>
817 Fall back to default (filepath with .orig suffix) if not specified
819 Fall back to default (filepath with .orig suffix) if not specified
818 '''
820 '''
819 origbackuppath = ui.config('ui', 'origbackuppath')
821 origbackuppath = ui.config('ui', 'origbackuppath')
820 if not origbackuppath:
822 if not origbackuppath:
821 return filepath + ".orig"
823 return filepath + ".orig"
822
824
823 # Convert filepath from an absolute path into a path inside the repo.
825 # Convert filepath from an absolute path into a path inside the repo.
824 filepathfromroot = util.normpath(os.path.relpath(filepath,
826 filepathfromroot = util.normpath(os.path.relpath(filepath,
825 start=repo.root))
827 start=repo.root))
826
828
827 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
829 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
828 origbackupdir = origvfs.dirname(filepathfromroot)
830 origbackupdir = origvfs.dirname(filepathfromroot)
829 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
831 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
830 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
832 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
831
833
832 # Remove any files that conflict with the backup file's path
834 # Remove any files that conflict with the backup file's path
833 for f in reversed(list(util.finddirs(filepathfromroot))):
835 for f in reversed(list(util.finddirs(filepathfromroot))):
834 if origvfs.isfileorlink(f):
836 if origvfs.isfileorlink(f):
835 ui.note(_('removing conflicting file: %s\n')
837 ui.note(_('removing conflicting file: %s\n')
836 % origvfs.join(f))
838 % origvfs.join(f))
837 origvfs.unlink(f)
839 origvfs.unlink(f)
838 break
840 break
839
841
840 origvfs.makedirs(origbackupdir)
842 origvfs.makedirs(origbackupdir)
841
843
842 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
844 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
843 ui.note(_('removing conflicting directory: %s\n')
845 ui.note(_('removing conflicting directory: %s\n')
844 % origvfs.join(filepathfromroot))
846 % origvfs.join(filepathfromroot))
845 origvfs.rmtree(filepathfromroot, forcibly=True)
847 origvfs.rmtree(filepathfromroot, forcibly=True)
846
848
847 return origvfs.join(filepathfromroot)
849 return origvfs.join(filepathfromroot)
848
850
849 class _containsnode(object):
851 class _containsnode(object):
850 """proxy __contains__(node) to container.__contains__ which accepts revs"""
852 """proxy __contains__(node) to container.__contains__ which accepts revs"""
851
853
852 def __init__(self, repo, revcontainer):
854 def __init__(self, repo, revcontainer):
853 self._torev = repo.changelog.rev
855 self._torev = repo.changelog.rev
854 self._revcontains = revcontainer.__contains__
856 self._revcontains = revcontainer.__contains__
855
857
856 def __contains__(self, node):
858 def __contains__(self, node):
857 return self._revcontains(self._torev(node))
859 return self._revcontains(self._torev(node))
858
860
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non-tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with the biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

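# Illustrative sketch (hypothetical caller, not part of the original module):
# a history-rewriting command that replaced one changeset with another would
# typically finish with
#
#     replacements = {oldctx.node(): [newctx.node()]}
#     cleanupnodes(repo, replacements, 'amend', fixphase=True)
#
# which moves any bookmarks from the old node to the new one, then either
# records obsolescence markers (when evolution is enabled) or schedules a
# strip of the old node.
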
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

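# Illustrative sketch (hypothetical caller): this is roughly what the
# 'hg addremove' command does, with similarity given as a percentage
# (e.g. --similarity 50):
#
#     m = matchall(repo)
#     ret = addremove(repo, m, '', opts={'similarity': '50'})
#
# A non-zero 'ret' signals that at least one explicitly named file was
# rejected.
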
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

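# Illustrative sketch (not part of the original module): the three helpers
# above form a pipeline. Assuming 'm' is a matcher over the working copy:
#
#     added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
#     renames = _findrenames(repo, m, added + unknown, removed + deleted, 0.75)
#     _markchanges(repo, unknown + forgotten, deleted, renames)
#
# i.e. walk the dirstate, pair up removed/added files whose content is at
# least 75% similar, then record the adds, removes and copies.
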
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons, it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

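# Illustrative sketch (hypothetical caller): 'hg copy src dst' ends up doing
# roughly
#
#     with repo.wlock():
#         wctx = repo[None]
#         dirstatecopy(ui, repo, wctx, 'src.txt', 'dst.txt', dryrun=False)
#
# after the file itself has been copied in the working directory.
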
def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the new
    value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the function
    needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)

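# Illustrative sketch (hypothetical subclass, not part of the original
# module): a repository class would define a filecache variant whose join()
# resolves names against the right vfs, roughly
#
#     class repofilecache(filecache):
#         def join(self, obj, fname):
#             return obj.vfs.join(fname)
#
#     class myrepo(object):
#         def __init__(self, vfs):
#             self.vfs = vfs
#             self._filecache = {}
#
#         @repofilecache('bookmarks')
#         def bookmarks(self):
#             return readbookmarkssomehow(self.vfs)  # hypothetical loader
#
# so repeated attribute reads hit the cache until .hg/bookmarks changes.
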
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

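# Illustrative sketch (hypothetical configuration): with an hgrc section like
#
#     [extdata]
#     blamedata = shell:cat blame.txt
#
# where blame.txt holds lines of the form '<node or revspec> <free text>',
# extdatasource(repo, 'blamedata') returns a {rev: text} mapping, silently
# skipping revisions unknown to the local repo.
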
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)

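# Illustrative sketch (hypothetical caller): the context-manager form
# guarantees the progress bar is cleared even on error:
#
#     with progress(ui, _('scanning'), unit=_('files'), total=len(files)) as p:
#         for f in files:
#             p.increment(item=f)
#
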
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta')
            or ui.configbool('format', 'sparse-revlog'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines, which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

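# Illustrative sketch (hypothetical usage): state files are written and read
# back along these lines:
#
#     f = simplekeyvaluefile(repo.vfs, 'mystate')    # hypothetical file name
#     f.write({'version': '1', 'node': nodehex}, firstline='2')
#     d = f.read(firstlinenonkeyval=True)
#     # d == {'__firstline': '2', 'version': '1', 'node': nodehex}
#
# where 'nodehex' stands in for a hex changeset id.
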
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

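# Illustrative sketch (hypothetical extension): a remote-filelog style
# extension would register itself along these lines:
#
#     def _prefetch(repo, revs, match):
#         pass  # fetch the matched files for 'revs' from the server
#
#     fileprefetchhooks.add('myextension', _prefetch)
#
# after which prefetchfiles() invokes the hook before commands touch files.
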
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible:
                # we call them "extinct" internally, but the term has not
                # been exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

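# Illustrative sketch: with more than maxnumnodes nodes and a non-verbose ui,
# nodesummaries(repo, nodes) yields a string of the form
# '<short hash> <short hash> <short hash> <short hash> and 3 others'.
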
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

1720 def unhidehashlikerevs(repo, specs, hiddentype):
1722 def unhidehashlikerevs(repo, specs, hiddentype):
1721 """parse the user specs and unhide changesets whose hash or revision number
1723 """parse the user specs and unhide changesets whose hash or revision number
1722 is passed.
1724 is passed.
1723
1725
1724 hiddentype can be: 1) 'warn': warn while unhiding changesets
1726 hiddentype can be: 1) 'warn': warn while unhiding changesets
1725 2) 'nowarn': don't warn while unhiding changesets
1727 2) 'nowarn': don't warn while unhiding changesets
1726
1728
1727 returns a repo object with the required changesets unhidden
1729 returns a repo object with the required changesets unhidden
1728 """
1730 """
1729 if not repo.filtername or not repo.ui.configbool('experimental',
1731 if not repo.filtername or not repo.ui.configbool('experimental',
1730 'directaccess'):
1732 'directaccess'):
1731 return repo
1733 return repo
1732
1734
1733 if repo.filtername not in ('visible', 'visible-hidden'):
1735 if repo.filtername not in ('visible', 'visible-hidden'):
1734 return repo
1736 return repo
1735
1737
1736 symbols = set()
1738 symbols = set()
1737 for spec in specs:
1739 for spec in specs:
1738 try:
1740 try:
1739 tree = revsetlang.parse(spec)
1741 tree = revsetlang.parse(spec)
1740 except error.ParseError: # will be reported by scmutil.revrange()
1742 except error.ParseError: # will be reported by scmutil.revrange()
1741 continue
1743 continue
1742
1744
1743 symbols.update(revsetlang.gethashlikesymbols(tree))
1745 symbols.update(revsetlang.gethashlikesymbols(tree))
1744
1746
1745 if not symbols:
1747 if not symbols:
1746 return repo
1748 return repo
1747
1749
1748 revs = _getrevsfromsymbols(repo, symbols)
1750 revs = _getrevsfromsymbols(repo, symbols)
1749
1751
1750 if not revs:
1752 if not revs:
1751 return repo
1753 return repo
1752
1754
1753 if hiddentype == 'warn':
1755 if hiddentype == 'warn':
1754 unfi = repo.unfiltered()
1756 unfi = repo.unfiltered()
1755 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1757 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1756 repo.ui.warn(_("warning: accessing hidden changesets for write "
1758 repo.ui.warn(_("warning: accessing hidden changesets for write "
1757 "operation: %s\n") % revstr)
1759 "operation: %s\n") % revstr)
1758
1760
1759 # we have to use new filtername to separate branch/tags cache until we can
1761 # we have to use new filtername to separate branch/tags cache until we can
1760 # disbale these cache when revisions are dynamically pinned.
1762 # disbale these cache when revisions are dynamically pinned.
1761 return repo.filtered('visible-hidden', revs)
1763 return repo.filtered('visible-hidden', revs)
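
# Hedged usage sketch: with '[experimental] directaccess = True' set, a
# command handling a spec such as b'1a2b3c' (an assumed hidden hash prefix)
# could do
#
#   repo = unhidehashlikerevs(repo, [b'1a2b3c'], 'warn')
#
# and get back a 'visible-hidden' filtered repo in which that changeset
# resolves, after a warning about accessing hidden changesets.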

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
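
# Hedged walkthrough: a symbol counts as hidden when it resolves in the
# unfiltered changelog but not in the filtered one. For example, if revision
# 5 is obsolete, the symbol '5' is added to the result only when
# 'experimental.directaccess.revnums' is enabled (otherwise numeric symbols
# are skipped outright), while a hex hash prefix of that changeset is added
# regardless, via resolvehexnodeidprefix().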

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
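
# Hedged reading of the revset above: starting from everything reachable
# from the bookmark, it subtracts history also reachable from non-bookmarked
# heads or from other bookmarks, leaving the changesets that "belong" to
# this bookmark. For example:
#
#   revs = bookmarkrevs(repo, b'feature')   # revisions owned by 'feature'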