pullreport: skip or rework some early return...
Boris Feld
r39934:b5e12039 default
@@ -1,1790 +1,1791
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

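# Usage sketch (illustrative, not part of the original module): `status` is a
# plain 7-tuple with named accessors, so it can be built from bare lists:
#
#   >>> st = status(['modified.txt'], [], [], [], [], [], ['clean.txt'])
#   >>> st.modified
#   ['modified.txt']
#   >>> st.clean
#   ['clean.txt']
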
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1

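# Usage sketch (illustrative; `ui` is assumed to be an existing ui object).
# callcatch() runs the callable, routes known failures to ui.error(), and
# returns an exit code instead of propagating:
#
#   >>> def fail():
#   ...     raise error.Abort(b'demo failure')
#   >>> callcatch(ui, fail)   # prints "abort: demo failure" via ui.error
#   -1
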
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

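# Illustrative behaviour (assumes any `repo` object; the label checks above
# do not depend on repository contents):
#
#   >>> checknewlabel(repo, b'tip', b'bookmark')      # reserved name -> Abort
#   >>> checknewlabel(repo, b'123', b'bookmark')      # pure integer -> Abort
#   >>> checknewlabel(repo, b'ok-name', b'bookmark')  # passes silently
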
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

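# Example configuration consumed above (illustrative hgrc sketch); the value
# maps to the (abort, warn) pair returned by checkportabilityalert():
#
#   [ui]
#   portablefilenames = warn     ; warn on non-portable names (the default)
#   ; portablefilenames = abort  ; refuse them outright
#   ; portablefilenames = ignore ; skip the check on POSIX
#
# On Windows, `abort` is forced to True regardless of the setting.
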
class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

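# Usage sketch (illustrative; `repo` is assumed): create one auditor per
# operation and call it for every filename being added:
#
#   >>> audit = casecollisionauditor(repo.ui, False, repo.dirstate)
#   >>> audit(b'README')   # warns if e.g. 'readme' is already tracked
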
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

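# The key is just SHA-1 over the sorted filtered revs rendered as b'1;2;5;',
# so two views hiding the same revisions agree on the digest. Standalone
# equivalent of the loop above (illustrative):
#
#   >>> s = hashlib.sha1()
#   >>> for rev in (1, 2, 5):
#   ...     s.update(b'%d;' % rev)
#   >>> key = s.digest()
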
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

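# Usage sketch (illustrative): enumerate every repository below a directory,
# following symlinks and descending into working copies:
#
#   >>> for repopath in walkrepos(b'/srv/repos', followsym=True,
#   ...                           recurse=True):
#   ...     print(repopath)
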
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

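# Illustrative behaviour (assumes a `repo` object): default verbosity uses
# the short hash, --debug the full 40-digit one:
#
#   >>> formatchangeid(repo[b'tip'])   # -> b'<rev>:<shorthash>'
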
def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev
        if prefix[0:1] == b'0' or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

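# Illustrative behaviour (assumes a repo with more than 42 revisions):
#
#   >>> mayberevnum(repo, b'42')    # plain int below tip: could be a rev
#   True
#   >>> mayberevnum(repo, b'042')   # leading zero is never read as a rev
#   False
#   >>> mayberevnum(repo, b'beef')  # not an integer at all
#   False
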
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

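# Usage sketch (illustrative): pass one shared dict as `cache` so repeated
# calls reuse the disambiguation revset and nodetree built above:
#
#   >>> cache = {}
#   >>> for node in somenodes:   # `somenodes` assumed to be binary node ids
#   ...     shortesthexnodeidprefix(repo, node, minlength=4, cache=cache)
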
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

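# Illustrative sketch (assumes a `repo` object): bare symbols resolve to a
# context, while full revset expressions are rejected:
#
#   >>> revsymbol(repo, b'tip')              # -> a changectx
#   >>> isrevsymbol(repo, b'max(public())')  # revsets are not symbols
#   False
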
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

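# Usage sketch (illustrative; assumes the repo has a revision 42): the specs
# are OR-ed into one smartset, and bare integers are taken as rev numbers:
#
#   >>> revs = revrange(repo, [b'draft()', 42])
#   >>> 42 in revs
#   True
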
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

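# Example configuration consumed above (illustrative hgrc; the directory name
# is an assumption): with this set, the backup of `dir/file.txt` lands under
# `.hg-origbackups/dir/file.txt` instead of being written as
# `dir/file.txt.orig`:
#
#   [ui]
#   origbackuppath = .hg-origbackups
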
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

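# Usage sketch (illustrative): wrap a revset result so membership can be
# tested with binary node ids instead of revision numbers:
#
#   >>> revs = repo.revs(b'draft()')
#   >>> haystack = _containsnode(repo, revs)
#   >>> somenode in haystack   # `somenode` assumed to be a binary node id
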
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in the obsmarker
    if obsolescence is enabled.
873 """
873 """
874 assert fixphase or targetphase is None
874 assert fixphase or targetphase is None
875 if not replacements and not moves:
875 if not replacements and not moves:
876 return
876 return
877
877
878 # translate mapping's other forms
878 # translate mapping's other forms
879 if not util.safehasattr(replacements, 'items'):
879 if not util.safehasattr(replacements, 'items'):
880 replacements = {(n,): () for n in replacements}
880 replacements = {(n,): () for n in replacements}
881 else:
881 else:
882 # upgrading non tuple "source" to tuple ones for BC
882 # upgrading non tuple "source" to tuple ones for BC
883 repls = {}
883 repls = {}
884 for key, value in replacements.items():
884 for key, value in replacements.items():
885 if not isinstance(key, tuple):
885 if not isinstance(key, tuple):
886 key = (key,)
886 key = (key,)
887 repls[key] = value
887 repls[key] = value
888 replacements = repls
888 replacements = repls
889
889
890 # Calculate bookmark movements
890 # Calculate bookmark movements
891 if moves is None:
891 if moves is None:
892 moves = {}
892 moves = {}
893 # Unfiltered repo is needed since nodes in replacements might be hidden.
893 # Unfiltered repo is needed since nodes in replacements might be hidden.
894 unfi = repo.unfiltered()
894 unfi = repo.unfiltered()
895 for oldnodes, newnodes in replacements.items():
895 for oldnodes, newnodes in replacements.items():
896 for oldnode in oldnodes:
896 for oldnode in oldnodes:
897 if oldnode in moves:
897 if oldnode in moves:
898 continue
898 continue
899 if len(newnodes) > 1:
899 if len(newnodes) > 1:
900 # usually a split, take the one with biggest rev number
900 # usually a split, take the one with biggest rev number
901 newnode = next(unfi.set('max(%ln)', newnodes)).node()
901 newnode = next(unfi.set('max(%ln)', newnodes)).node()
902 elif len(newnodes) == 0:
902 elif len(newnodes) == 0:
903 # move bookmark backwards
903 # move bookmark backwards
904 allreplaced = []
904 allreplaced = []
905 for rep in replacements:
905 for rep in replacements:
906 allreplaced.extend(rep)
906 allreplaced.extend(rep)
907 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
907 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
908 allreplaced))
908 allreplaced))
909 if roots:
909 if roots:
910 newnode = roots[0].node()
910 newnode = roots[0].node()
911 else:
911 else:
912 newnode = nullid
912 newnode = nullid
913 else:
913 else:
914 newnode = newnodes[0]
914 newnode = newnodes[0]
915 moves[oldnode] = newnode
915 moves[oldnode] = newnode
916
916
917 allnewnodes = [n for ns in replacements.values() for n in ns]
917 allnewnodes = [n for ns in replacements.values() for n in ns]
918 toretract = {}
918 toretract = {}
919 toadvance = {}
919 toadvance = {}
920 if fixphase:
920 if fixphase:
921 precursors = {}
921 precursors = {}
922 for oldnodes, newnodes in replacements.items():
922 for oldnodes, newnodes in replacements.items():
923 for oldnode in oldnodes:
923 for oldnode in oldnodes:
924 for newnode in newnodes:
924 for newnode in newnodes:
925 precursors.setdefault(newnode, []).append(oldnode)
925 precursors.setdefault(newnode, []).append(oldnode)
926
926
927 allnewnodes.sort(key=lambda n: unfi[n].rev())
927 allnewnodes.sort(key=lambda n: unfi[n].rev())
928 newphases = {}
928 newphases = {}
929 def phase(ctx):
929 def phase(ctx):
930 return newphases.get(ctx.node(), ctx.phase())
930 return newphases.get(ctx.node(), ctx.phase())
931 for newnode in allnewnodes:
931 for newnode in allnewnodes:
932 ctx = unfi[newnode]
932 ctx = unfi[newnode]
933 parentphase = max(phase(p) for p in ctx.parents())
933 parentphase = max(phase(p) for p in ctx.parents())
934 if targetphase is None:
934 if targetphase is None:
935 oldphase = max(unfi[oldnode].phase()
935 oldphase = max(unfi[oldnode].phase()
936 for oldnode in precursors[newnode])
936 for oldnode in precursors[newnode])
937 newphase = max(oldphase, parentphase)
937 newphase = max(oldphase, parentphase)
938 else:
938 else:
939 newphase = max(targetphase, parentphase)
939 newphase = max(targetphase, parentphase)
940 newphases[newnode] = newphase
940 newphases[newnode] = newphase
941 if newphase > ctx.phase():
941 if newphase > ctx.phase():
942 toretract.setdefault(newphase, []).append(newnode)
942 toretract.setdefault(newphase, []).append(newnode)
943 elif newphase < ctx.phase():
943 elif newphase < ctx.phase():
944 toadvance.setdefault(newphase, []).append(newnode)
944 toadvance.setdefault(newphase, []).append(newnode)
945
945
946 with repo.transaction('cleanup') as tr:
946 with repo.transaction('cleanup') as tr:
947 # Move bookmarks
947 # Move bookmarks
948 bmarks = repo._bookmarks
948 bmarks = repo._bookmarks
949 bmarkchanges = []
949 bmarkchanges = []
950 for oldnode, newnode in moves.items():
950 for oldnode, newnode in moves.items():
951 oldbmarks = repo.nodebookmarks(oldnode)
951 oldbmarks = repo.nodebookmarks(oldnode)
952 if not oldbmarks:
952 if not oldbmarks:
953 continue
953 continue
954 from . import bookmarks # avoid import cycle
954 from . import bookmarks # avoid import cycle
955 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
955 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
956 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
956 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
957 hex(oldnode), hex(newnode)))
957 hex(oldnode), hex(newnode)))
958 # Delete divergent bookmarks being parents of related newnodes
958 # Delete divergent bookmarks being parents of related newnodes
959 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
959 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
960 allnewnodes, newnode, oldnode)
960 allnewnodes, newnode, oldnode)
961 deletenodes = _containsnode(repo, deleterevs)
961 deletenodes = _containsnode(repo, deleterevs)
962 for name in oldbmarks:
962 for name in oldbmarks:
963 bmarkchanges.append((name, newnode))
963 bmarkchanges.append((name, newnode))
964 for b in bookmarks.divergent2delete(repo, deletenodes, name):
964 for b in bookmarks.divergent2delete(repo, deletenodes, name):
965 bmarkchanges.append((b, None))
965 bmarkchanges.append((b, None))
966
966
967 if bmarkchanges:
967 if bmarkchanges:
968 bmarks.applychanges(repo, tr, bmarkchanges)
968 bmarks.applychanges(repo, tr, bmarkchanges)
969
969
970 for phase, nodes in toretract.items():
970 for phase, nodes in toretract.items():
971 phases.retractboundary(repo, tr, phase, nodes)
971 phases.retractboundary(repo, tr, phase, nodes)
972 for phase, nodes in toadvance.items():
972 for phase, nodes in toadvance.items():
973 phases.advanceboundary(repo, tr, phase, nodes)
973 phases.advanceboundary(repo, tr, phase, nodes)
974
974
975 # Obsolete or strip nodes
975 # Obsolete or strip nodes
976 if obsolete.isenabled(repo, obsolete.createmarkersopt):
976 if obsolete.isenabled(repo, obsolete.createmarkersopt):
977 # If a node is already obsoleted, and we want to obsolete it
977 # If a node is already obsoleted, and we want to obsolete it
978 # without a successor, skip that obssolete request since it's
978 # without a successor, skip that obssolete request since it's
979 # unnecessary. That's the "if s or not isobs(n)" check below.
979 # unnecessary. That's the "if s or not isobs(n)" check below.
980 # Also sort the node in topology order, that might be useful for
980 # Also sort the node in topology order, that might be useful for
981 # some obsstore logic.
981 # some obsstore logic.
982 # NOTE: the filtering and sorting might belong to createmarkers.
982 # NOTE: the filtering and sorting might belong to createmarkers.
983 isobs = unfi.obsstore.successors.__contains__
983 isobs = unfi.obsstore.successors.__contains__
984 torev = unfi.changelog.rev
984 torev = unfi.changelog.rev
985 sortfunc = lambda ns: torev(ns[0][0])
985 sortfunc = lambda ns: torev(ns[0][0])
986 rels = []
986 rels = []
987 for ns, s in sorted(replacements.items(), key=sortfunc):
987 for ns, s in sorted(replacements.items(), key=sortfunc):
988 for n in ns:
988 for n in ns:
989 if s or not isobs(n):
989 if s or not isobs(n):
990 rel = (unfi[n], tuple(unfi[m] for m in s))
990 rel = (unfi[n], tuple(unfi[m] for m in s))
991 rels.append(rel)
991 rels.append(rel)
992 if rels:
992 if rels:
993 obsolete.createmarkers(repo, rels, operation=operation,
993 obsolete.createmarkers(repo, rels, operation=operation,
994 metadata=metadata)
994 metadata=metadata)
995 else:
995 else:
996 from . import repair # avoid import cycle
996 from . import repair # avoid import cycle
997 tostrip = list(n for ns in replacements for n in ns)
997 tostrip = list(n for ns in replacements for n in ns)
998 if tostrip:
998 if tostrip:
999 repair.delayedstrip(repo.ui, repo, tostrip, operation,
999 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1000 backup=backup)
1000 backup=backup)
1001
1001
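# Editor's illustrative sketch, not part of the original module: a
# history-rewriting command that folded two changesets into one might call
# this as follows ('old1', 'old2' and 'new' are hypothetical node ids):
#
#   cleanupnodes(repo, {(old1, old2): (new,)}, operation='fold',
#                fixphase=True)
#
# With obsolescence markers enabled this records one marker per old node;
# otherwise the old nodes are handed to repair.delayedstrip().
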
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the new
    value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the function
    needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)

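# Editor's illustrative sketch, not part of the original module; the names
# below are hypothetical, but mirror how localrepository consumes filecache:
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class fakerepo(object):
#       def __init__(self, vfs):
#           self.vfs = vfs
#           self._filecache = {}
#
#       @repofilecache('bookmarks')
#       def bookmarks(self):
#           return self.vfs.tryread('bookmarks')
#
# The first attribute access stats the file and caches the result; the getter
# only runs again once changed() reports different stat data.
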
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

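# Editor's illustrative sketch, not part of the original module: given a
# configuration such as (the source name and script are made up)
#
#   [extdata]
#   bugstatus = shell:python buginfo.py
#
# where the command prints lines like '3de5eca88c00 fixed', the map can be
# fetched with:
#
#   data = extdatasource(repo, 'bugstatus')    # -> {rev: 'fixed', ...}
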
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

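# Editor's illustrative sketch, not part of the original module: a caller
# already holding the wlock can let a child hg process reuse it (the command
# here is made up):
#
#   with repo.wlock():
#       ret = wlocksub(repo, 'hg update --clean')
#
# The child finds the lock token in $HG_WLOCK_LOCKER and treats the wlock as
# inherited rather than contended.
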
class progress(object):
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)

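# Editor's illustrative sketch, not part of the original module:
#
#   with progress(ui, _('scanning'), unit=_('files'), total=len(files)) as p:
#       for f in files:
#           p.increment(item=f)
#
# __exit__() calls complete(), so the progress topic is cleared even when the
# loop body raises.
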
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta')
            or ui.configbool('format', 'sparse-revlog'))

def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

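# Editor's illustrative sketch, not part of the original module ('mystate'
# and the keys are made up):
#
#   f = simplekeyvaluefile(repo.vfs, 'mystate')
#   f.write({'version': '2', 'name': 'default'}, firstline='2')
#   d = f.read(firstlinenonkeyval=True)
#   # d == {'__firstline': '2', 'version': '2', 'name': 'default'}
#
# write() replaces the file atomically (atomictemp), so readers never see a
# half-written state file.
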
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

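# Editor's illustrative sketch, not part of the original module: an extension
# registers a prefetch function at load time ('myext' and _prefetch are made
# up):
#
#   def _prefetch(repo, revs, match):
#       # fetch the matched file revisions from a remote store here
#       pass
#
#   fileprefetchhooks.add('myext', _prefetch)
#
# util.hooks() invokes every registered function, sorted by source name.
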
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

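# Editor's illustrative sketch, not part of the original module: transaction
# creators wire the summaries up roughly like this, so the report callbacks
# fire once the transaction closes:
#
#   tr = repo.transaction('pull')     # txnname decides which reports match
#   registersummarycallback(repo, tr, txnname='pull')
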
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

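# Editor's illustrative sketch, not part of the original module:
#
#   repo = unhidehashlikerevs(repo, ['ab12cd34ef56', 'stable'], 'warn')
#
# Only hash- or revnum-like symbols can pin hidden changesets here
# ('ab12cd34ef56' is a made-up hash); 'stable' parses as a plain name and is
# left to normal revset resolution.
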
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
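
# Editor's illustrative sketch, not part of the original module:
#
#   for rev in bookmarkrevs(repo, 'feature-x'):    # 'feature-x' is made up
#       repo.ui.write('%d\n' % rev)
#
# The revset keeps ancestors of the named bookmark while excluding history
# that is also reachable from other heads or from other bookmarks.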