revpair: simplify revpair by always relying on smartset.first/last...
Martin von Zweigbergk
r41414:5079242a default
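The hunk below is the whole change: revpair() previously special-cased empty, ascending, and descending smartsets before falling back to first()/last(). A minimal sketch of why those branches were redundant, using a hypothetical fakesmartset stand-in (not Mercurial's real smartset class; assumption: real smartsets likewise return None from first()/last() when empty):

    # Toy model of the endpoints revpair() needs from a smartset.
    class fakesmartset(object):
        def __init__(self, revs):
            self._revs = list(revs)  # iteration order is preserved

        def first(self):
            # None for an empty set, like smartset.first()
            return self._revs[0] if self._revs else None

        def last(self):
            return self._revs[-1] if self._revs else None

    for revs in ([1, 2, 3], [3, 2, 1], []):
        s = fakesmartset(revs)
        print('%r -> first=%r, last=%r' % (revs, s.first(), s.last()))
    # [1, 2, 3] -> first=1, last=3        (ascending: same as min/max)
    # [3, 2, 1] -> first=3, last=1        (descending: same as max/min)
    # []        -> first=None, last=None  (caught by "if first is None")

In every case first()/last() yield exactly what the removed min()/max() branches computed, so the if/elif chain collapses to two lines.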
@@ -1,1823 +1,1814 @@
 # scmutil.py - Mercurial core utility functions
 #
 # Copyright Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from __future__ import absolute_import

 import errno
 import glob
 import hashlib
 import os
 import re
 import subprocess
 import weakref

 from .i18n import _
 from .node import (
     bin,
     hex,
     nullid,
     nullrev,
     short,
     wdirid,
     wdirrev,
 )

 from . import (
     encoding,
     error,
     match as matchmod,
     obsolete,
     obsutil,
     pathutil,
     phases,
     policy,
     pycompat,
     revsetlang,
     similar,
     smartset,
     url,
     util,
     vfs,
 )

 from .utils import (
     procutil,
     stringutil,
 )

 if pycompat.iswindows:
     from . import scmwindows as scmplatform
 else:
     from . import scmposix as scmplatform

 parsers = policy.importmod(r'parsers')

 termsize = scmplatform.termsize

 class status(tuple):
     '''Named tuple with a list of files per status. The 'deleted', 'unknown'
     and 'ignored' properties are only relevant to the working copy.
     '''

     __slots__ = ()

     def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                 clean):
         return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                    ignored, clean))

     @property
     def modified(self):
         '''files that have been modified'''
         return self[0]

     @property
     def added(self):
         '''files that have been added'''
         return self[1]

     @property
     def removed(self):
         '''files that have been removed'''
         return self[2]

     @property
     def deleted(self):
         '''files that are in the dirstate, but have been deleted from the
         working copy (aka "missing")
         '''
         return self[3]

     @property
     def unknown(self):
         '''files not in the dirstate that are not ignored'''
         return self[4]

     @property
     def ignored(self):
         '''files not in the dirstate that are ignored (by _dirignore())'''
         return self[5]

     @property
     def clean(self):
         '''files that have not been modified'''
         return self[6]

     def __repr__(self, *args, **kwargs):
         return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                  r'unknown=%s, ignored=%s, clean=%s>') %
                 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

 def itersubrepos(ctx1, ctx2):
     """find subrepos in ctx1 or ctx2"""
     # Create a (subpath, ctx) mapping where we prefer subpaths from
     # ctx1. The subpaths from ctx2 are important when the .hgsub file
     # has been modified (in ctx2) but not yet committed (in ctx1).
     subpaths = dict.fromkeys(ctx2.substate, ctx2)
     subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

     missing = set()

     for subpath in ctx2.substate:
         if subpath not in ctx1.substate:
             del subpaths[subpath]
             missing.add(subpath)

     for subpath, ctx in sorted(subpaths.iteritems()):
         yield subpath, ctx.sub(subpath)

     # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
     # status and diff will have an accurate result when it does
     # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
     # against itself.
     for subpath in missing:
         yield subpath, ctx2.nullsub(subpath, ctx1)

 def nochangesfound(ui, repo, excluded=None):
     '''Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
     '''
     secretlist = []
     if excluded:
         for n in excluded:
             ctx = repo[n]
             if ctx.phase() >= phases.secret and not ctx.extinct():
                 secretlist.append(n)

     if secretlist:
         ui.status(_("no changes found (ignored %d secret changesets)\n")
                   % len(secretlist))
     else:
         ui.status(_("no changes found\n"))

 def callcatch(ui, func):
     """call func() with global exception handling

     return func() if no exception happens. otherwise do some error handling
     and return an exit code accordingly. does not handle all exceptions.
     """
     try:
         try:
             return func()
         except: # re-raises
             ui.traceback()
             raise
     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
         if inst.errno == errno.ETIMEDOUT:
             reason = _('timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker))
         else:
             reason = _('lock held by %r') % inst.locker
         ui.error(_("abort: %s: %s\n") % (
             inst.desc or stringutil.forcebytestr(inst.filename), reason))
         if not inst.locker:
             ui.error(_("(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
         ui.error(_("abort: could not lock %s: %s\n") %
                  (inst.desc or stringutil.forcebytestr(inst.filename),
                   encoding.strtolocal(inst.strerror)))
     except error.OutOfBandError as inst:
         if inst.args:
             msg = _("abort: remote error:\n")
         else:
             msg = _("abort: remote error\n")
         ui.error(msg)
         if inst.args:
             ui.error(''.join(inst.args))
         if inst.hint:
             ui.error('(%s)\n' % inst.hint)
     except error.RepoError as inst:
         ui.error(_("abort: %s!\n") % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
     except error.ResponseError as inst:
         ui.error(_("abort: %s") % inst.args[0])
         msg = inst.args[1]
         if isinstance(msg, type(u'')):
             msg = pycompat.sysbytes(msg)
         if not isinstance(msg, bytes):
             ui.error(" %r\n" % (msg,))
         elif not msg:
             ui.error(_(" empty string\n"))
         else:
             ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
         ui.error(_("abort: file censored %s!\n") % inst)
     except error.StorageError as inst:
         ui.error(_("abort: %s!\n") % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
     except error.InterventionRequired as inst:
         ui.error("%s\n" % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
         return 1
     except error.WdirUnsupported:
         ui.error(_("abort: working directory revision cannot be specified\n"))
     except error.Abort as inst:
         ui.error(_("abort: %s\n") % inst)
         if inst.hint:
             ui.error(_("(%s)\n") % inst.hint)
     except ImportError as inst:
         ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
         if m in "mpatch bdiff".split():
             ui.error(_("(did you forget to compile extensions?)\n"))
         elif m in "zlib".split():
             ui.error(_("(is your Python install correct?)\n"))
     except IOError as inst:
         if util.safehasattr(inst, "code"):
             ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
         elif util.safehasattr(inst, "reason"):
             try: # usually it is in the form (errno, strerror)
                 reason = inst.reason.args[1]
             except (AttributeError, IndexError):
                 # it might be anything, for example a string
                 reason = inst.reason
             if isinstance(reason, pycompat.unicode):
                 # SSLError of Python 2.7.9 contains a unicode
                 reason = encoding.unitolocal(reason)
             ui.error(_("abort: error: %s\n") % reason)
         elif (util.safehasattr(inst, "args")
               and inst.args and inst.args[0] == errno.EPIPE):
             pass
         elif getattr(inst, "strerror", None):
             if getattr(inst, "filename", None):
                 ui.error(_("abort: %s: %s\n") % (
                     encoding.strtolocal(inst.strerror),
                     stringutil.forcebytestr(inst.filename)))
             else:
                 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
         else:
             raise
     except OSError as inst:
         if getattr(inst, "filename", None) is not None:
             ui.error(_("abort: %s: '%s'\n") % (
                 encoding.strtolocal(inst.strerror),
                 stringutil.forcebytestr(inst.filename)))
         else:
             ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
     except MemoryError:
         ui.error(_("abort: out of memory\n"))
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case catch this and pass exit code to caller.
         return inst.code

     return -1

 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
     if lbl in ['tip', '.', 'null']:
         raise error.Abort(_("the name '%s' is reserved") % lbl)
     for c in (':', '\0', '\n', '\r'):
         if c in lbl:
             raise error.Abort(
                 _("%r cannot be used in a name") % pycompat.bytestr(c))
     try:
         int(lbl)
         raise error.Abort(_("cannot use an integer as a name"))
     except ValueError:
         pass
     if lbl.strip() != lbl:
         raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if '\r' in f or '\n' in f:
         raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                           % pycompat.bytestr(f))

 def checkportable(ui, f):
     '''Check if filename f is portable and warn or abort depending on config'''
     checkfilename(f)
     abort, warn = checkportabilityalert(ui)
     if abort or warn:
         msg = util.checkwinfilename(f)
         if msg:
             msg = "%s: %s" % (msg, procutil.shellquote(f))
             if abort:
                 raise error.Abort(msg)
             ui.warn(_("warning: %s\n") % msg)

 def checkportabilityalert(ui):
     '''check if the user's config requests nothing, a warning, or abort for
     non-portable filenames'''
     val = ui.config('ui', 'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
     abort = pycompat.iswindows or lval == 'abort'
     warn = bval or lval == 'warn'
     if bval is None and not (warn or abort or lval == 'ignore'):
         raise error.ConfigError(
             _("ui.portablefilenames value is invalid ('%s')") % val)
     return abort, warn

 class casecollisionauditor(object):
     def __init__(self, ui, abort, dirstate):
         self._ui = ui
         self._abort = abort
         allfiles = '\0'.join(dirstate._map)
         self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
         self._dirstate = dirstate
         # The purpose of _newfiles is so that we don't complain about
         # case collisions if someone were to call this object with the
         # same filename twice.
         self._newfiles = set()

     def __call__(self, f):
         if f in self._newfiles:
             return
         fl = encoding.lower(f)
         if fl in self._loweredfiles and f not in self._dirstate:
             msg = _('possible case-folding collision for %s') % f
             if self._abort:
                 raise error.Abort(msg)
             self._ui.warn(_("warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)

 def filteredhash(repo, maxrev):
     """build hash of filtered revisions in the current repoview.

     Multiple caches perform up-to-date validation by checking that the
     tiprev and tipnode stored in the cache file match the current repository.
     However, this is not sufficient for validating repoviews because the set
     of revisions in the view may change without the repository tiprev and
     tipnode changing.

     This function hashes all the revs filtered from the view and returns
     that SHA-1 digest.
     """
     cl = repo.changelog
     if not cl.filteredrevs:
         return None
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
         s = hashlib.sha1()
         for rev in revs:
             s.update('%d;' % rev)
         key = s.digest()
     return key

 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     '''yield every hg repository under path, always recursively.
     The recurse flag will only control recursion into repo working dirs'''
     def errhandler(err):
         if err.filename == path:
             raise err
     samestat = getattr(os.path, 'samestat', None)
     if followsym and samestat is not None:
         def adddir(dirlst, dirname):
             dirstat = os.stat(dirname)
             match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
             if not match:
                 dirlst.append(dirstat)
             return not match
     else:
         followsym = False

     if (seen_dirs is None) and followsym:
         seen_dirs = []
         adddir(seen_dirs, path)
     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
         dirs.sort()
         if '.hg' in dirs:
             yield root # found a repository
             qroot = os.path.join(root, '.hg', 'patches')
             if os.path.isdir(os.path.join(qroot, '.hg')):
                 yield qroot # we have a patch queue repo here
             if recurse:
                 # avoid recursing inside the .hg directory
                 dirs.remove('.hg')
             else:
                 dirs[:] = [] # don't descend further
         elif followsym:
             newdirs = []
             for d in dirs:
                 fname = os.path.join(root, d)
                 if adddir(seen_dirs, fname):
                     if os.path.islink(fname):
                         for hgname in walkrepos(fname, True, seen_dirs):
                             yield hgname
                     else:
                         newdirs.append(d)
             dirs[:] = newdirs

 def binnode(ctx):
     """Return binary node id for a given basectx"""
     node = ctx.node()
     if node is None:
         return wdirid
     return node

 def intrev(ctx):
     """Return integer for a given basectx that can be used in comparison or
     arithmetic operation"""
     rev = ctx.rev()
     if rev is None:
         return wdirrev
     return rev

 def formatchangeid(ctx):
     """Format changectx as '{rev}:{node|formatnode}', which is the default
     template provided by logcmdutil.changesettemplater"""
     repo = ctx.repo()
     return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

 def formatrevnode(ui, rev, node):
     """Format given revision and node depending on the current verbosity"""
     if ui.debugflag:
         hexfunc = hex
     else:
         hexfunc = short
     return '%d:%s' % (rev, hexfunc(node))

 def resolvehexnodeidprefix(repo, prefix):
     if (prefix.startswith('x') and
             repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
         prefix = prefix[1:]
     try:
         # Uses unfiltered repo because it's faster when prefix is ambiguous.
         # This matches the shortesthexnodeidprefix() function below.
         node = repo.unfiltered().changelog._partialmatch(prefix)
     except error.AmbiguousPrefixLookupError:
         revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
         if revset:
             # Clear config to avoid infinite recursion
             configoverrides = {('experimental',
                                 'revisions.disambiguatewithin'): None}
             with repo.ui.configoverride(configoverrides):
                 revs = repo.anyrevs([revset], user=True)
                 matches = []
                 for rev in revs:
                     node = repo.changelog.node(rev)
                     if hex(node).startswith(prefix):
                         matches.append(node)
                 if len(matches) == 1:
                     return matches[0]
         raise
     if node is None:
         return
     repo.changelog.rev(node) # make sure node isn't filtered
     return node

 def mayberevnum(repo, prefix):
     """Checks if the given prefix may be mistaken for a revision number"""
     try:
         i = int(prefix)
         # if we are a pure int, then starting with zero will not be
         # confused as a rev; or, obviously, if the int is larger
         # than the value of the tip rev. We still need to disambiguate if
         # prefix == '0', since that *is* a valid revnum.
         if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
             return False
         return True
     except ValueError:
         return False

 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
     """Find the shortest unambiguous prefix that matches hexnode.

     If "cache" is not None, it must be a dictionary that can be used for
     caching between calls to this method.
     """
     # _partialmatch() of filtered changelog could take O(len(repo)) time,
     # which would be unacceptably slow. so we look for hash collision in
     # unfiltered space, which means some hashes may be slightly longer.

     minlength = max(minlength, 1)

     def disambiguate(prefix):
         """Disambiguate against revnums."""
         if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
             if mayberevnum(repo, prefix):
                 return 'x' + prefix
             else:
                 return prefix

         hexnode = hex(node)
         for length in range(len(prefix), len(hexnode) + 1):
             prefix = hexnode[:length]
             if not mayberevnum(repo, prefix):
                 return prefix

     cl = repo.unfiltered().changelog
     revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
     if revset:
         revs = None
         if cache is not None:
             revs = cache.get('disambiguationrevset')
         if revs is None:
             revs = repo.anyrevs([revset], user=True)
             if cache is not None:
                 cache['disambiguationrevset'] = revs
         if cl.rev(node) in revs:
             hexnode = hex(node)
             nodetree = None
             if cache is not None:
                 nodetree = cache.get('disambiguationnodetree')
             if not nodetree:
                 try:
                     nodetree = parsers.nodetree(cl.index, len(revs))
                 except AttributeError:
                     # no native nodetree
                     pass
                 else:
                     for r in revs:
                         nodetree.insert(r)
                     if cache is not None:
                         cache['disambiguationnodetree'] = nodetree
             if nodetree is not None:
                 length = max(nodetree.shortest(node), minlength)
                 prefix = hexnode[:length]
                 return disambiguate(prefix)
             for length in range(minlength, len(hexnode) + 1):
                 matches = []
                 prefix = hexnode[:length]
                 for rev in revs:
                     otherhexnode = repo[rev].hex()
                     if prefix == otherhexnode[:length]:
                         matches.append(otherhexnode)
                 if len(matches) == 1:
                     return disambiguate(prefix)

     try:
         return disambiguate(cl.shortest(node, minlength))
     except error.LookupError:
         raise error.RepoLookupError()

 def isrevsymbol(repo, symbol):
     """Checks if a symbol exists in the repo.

     See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
     symbol is an ambiguous nodeid prefix.
     """
     try:
         revsymbol(repo, symbol)
         return True
     except error.RepoLookupError:
         return False

 def revsymbol(repo, symbol):
     """Returns a context given a single revision symbol (as string).

     This is similar to revsingle(), but accepts only a single revision symbol,
     i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
     not "max(public())".
     """
     if not isinstance(symbol, bytes):
         msg = ("symbol (%s of type %s) was not a string, did you mean "
                "repo[symbol]?" % (symbol, type(symbol)))
         raise error.ProgrammingError(msg)
     try:
         if symbol in ('.', 'tip', 'null'):
             return repo[symbol]

         try:
             r = int(symbol)
             if '%d' % r != symbol:
                 raise ValueError
             l = len(repo.changelog)
             if r < 0:
                 r += l
             if r < 0 or r >= l and r != wdirrev:
                 raise ValueError
             return repo[r]
         except error.FilteredIndexError:
             raise
         except (ValueError, OverflowError, IndexError):
             pass

         if len(symbol) == 40:
             try:
                 node = bin(symbol)
                 rev = repo.changelog.rev(node)
                 return repo[rev]
             except error.FilteredLookupError:
                 raise
             except (TypeError, LookupError):
                 pass

         # look up bookmarks through the name interface
         try:
             node = repo.names.singlenode(repo, symbol)
             rev = repo.changelog.rev(node)
             return repo[rev]
         except KeyError:
             pass

         node = resolvehexnodeidprefix(repo, symbol)
         if node is not None:
             rev = repo.changelog.rev(node)
             return repo[rev]

         raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

     except error.WdirUnsupported:
         return repo[None]
     except (error.FilteredIndexError, error.FilteredLookupError,
             error.FilteredRepoLookupError):
         raise _filterederror(repo, symbol)

 def _filterederror(repo, changeid):
     """build an exception to be raised about a filtered changeid

     This is extracted in a function to help extensions (eg: evolve) to
     experiment with various message variants."""
     if repo.filtername.startswith('visible'):

         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
         ctx = revsymbol(unfilteredrepo, changeid)

         # If the changeset is obsolete, enrich the message with the reason
         # that made this changeset not visible
         if ctx.obsolete():
             msg = obsutil._getfilteredreason(repo, changeid, ctx)
         else:
             msg = _("hidden revision '%s'") % changeid

         hint = _('use --hidden to access hidden revisions')

         return error.FilteredRepoLookupError(msg, hint=hint)
     msg = _("filtered revision '%s' (not in '%s' subset)")
     msg %= (changeid, repo.filtername)
     return error.FilteredRepoLookupError(msg)

 def revsingle(repo, revspec, default='.', localalias=None):
     if not revspec and revspec != 0:
         return repo[default]

     l = revrange(repo, [revspec], localalias=localalias)
     if not l:
         raise error.Abort(_('empty revision set'))
     return repo[l.last()]

 def _pairspec(revspec):
     tree = revsetlang.parse(revspec)
     return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

 def revpair(repo, revs):
     if not revs:
         return repo['.'], repo[None]

     l = revrange(repo, revs)

-    if not l:
-        first = second = None
-    elif l.isascending():
-        first = l.min()
-        second = l.max()
-    elif l.isdescending():
-        first = l.max()
-        second = l.min()
-    else:
-        first = l.first()
-        second = l.last()
+    first = l.first()
+    second = l.last()

     if first is None:
         raise error.Abort(_('empty revision range'))
     if (first == second and len(revs) >= 2
             and not all(revrange(repo, [r]) for r in revs)):
         raise error.Abort(_('empty revision on one side of range'))

     # if top-level is range expression, the result must always be a pair
     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
         return repo[first], repo[None]

     return repo[first], repo[second]

 def revrange(repo, specs, localalias=None):
     """Execute 1 to many revsets and return the union.

     This is the preferred mechanism for executing revsets using user-specified
     config options, such as revset aliases.

     The revsets specified by ``specs`` will be executed via a chained ``OR``
     expression. If ``specs`` is empty, an empty result is returned.

     ``specs`` can contain integers, in which case they are assumed to be
     revision numbers.

     It is assumed the revsets are already formatted. If you have arguments
     that need to be expanded in the revset, call ``revsetlang.formatspec()``
     and pass the result as an element of ``specs``.

     Specifying a single revset is allowed.

     Returns a ``revset.abstractsmartset`` which is a list-like interface over
     integer revisions.
     """
     allspecs = []
     for spec in specs:
         if isinstance(spec, int):
             spec = revsetlang.formatspec('%d', spec)
         allspecs.append(spec)
     return repo.anyrevs(allspecs, user=True, localalias=localalias)

 def meaningfulparents(repo, ctx):
     """Return list of meaningful (or all if debug) parentrevs for rev.

     For merges (two non-nullrev revisions) both parents are meaningful.
     Otherwise the first parent revision is considered meaningful if it
     is not the preceding revision.
     """
     parents = ctx.parents()
     if len(parents) > 1:
         return parents
     if repo.ui.debugflag:
         return [parents[0], repo[nullrev]]
     if parents[0].rev() >= intrev(ctx) - 1:
         return []
     return parents

 def expandpats(pats):
     '''Expand bare globs when running on windows.
     On posix we assume it already has been done by sh.'''
746 if not util.expandglobs:
737 if not util.expandglobs:
747 return list(pats)
738 return list(pats)
748 ret = []
739 ret = []
749 for kindpat in pats:
740 for kindpat in pats:
750 kind, pat = matchmod._patsplit(kindpat, None)
741 kind, pat = matchmod._patsplit(kindpat, None)
751 if kind is None:
742 if kind is None:
752 try:
743 try:
753 globbed = glob.glob(pat)
744 globbed = glob.glob(pat)
754 except re.error:
745 except re.error:
755 globbed = [pat]
746 globbed = [pat]
756 if globbed:
747 if globbed:
757 ret.extend(globbed)
748 ret.extend(globbed)
758 continue
749 continue
759 ret.append(kindpat)
750 ret.append(kindpat)
760 return ret
751 return ret
761
752
762 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
753 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
763 badfn=None):
754 badfn=None):
764 '''Return a matcher and the patterns that were used.
755 '''Return a matcher and the patterns that were used.
765 The matcher will warn about bad matches, unless an alternate badfn callback
756 The matcher will warn about bad matches, unless an alternate badfn callback
766 is provided.'''
757 is provided.'''
767 if pats == ("",):
758 if pats == ("",):
768 pats = []
759 pats = []
769 if opts is None:
760 if opts is None:
770 opts = {}
761 opts = {}
771 if not globbed and default == 'relpath':
762 if not globbed and default == 'relpath':
772 pats = expandpats(pats or [])
763 pats = expandpats(pats or [])
773
764
774 def bad(f, msg):
765 def bad(f, msg):
775 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
766 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
776
767
777 if badfn is None:
768 if badfn is None:
778 badfn = bad
769 badfn = bad
779
770
780 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
771 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
781 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
772 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
782
773
783 if m.always():
774 if m.always():
784 pats = []
775 pats = []
785 return m, pats
776 return m, pats
786
777
787 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
778 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
788 badfn=None):
779 badfn=None):
789 '''Return a matcher that will warn about bad matches.'''
780 '''Return a matcher that will warn about bad matches.'''
790 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
781 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
791
782
792 def matchall(repo):
783 def matchall(repo):
793 '''Return a matcher that will efficiently match everything.'''
784 '''Return a matcher that will efficiently match everything.'''
794 return matchmod.always(repo.root, repo.getcwd())
785 return matchmod.always(repo.root, repo.getcwd())
795
786
796 def matchfiles(repo, files, badfn=None):
787 def matchfiles(repo, files, badfn=None):
797 '''Return a matcher that will efficiently match exactly these files.'''
788 '''Return a matcher that will efficiently match exactly these files.'''
798 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
789 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
799
790
800 def parsefollowlinespattern(repo, rev, pat, msg):
791 def parsefollowlinespattern(repo, rev, pat, msg):
801 """Return a file name from `pat` pattern suitable for usage in followlines
792 """Return a file name from `pat` pattern suitable for usage in followlines
802 logic.
793 logic.
803 """
794 """
804 if not matchmod.patkind(pat):
795 if not matchmod.patkind(pat):
805 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
796 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
806 else:
797 else:
807 ctx = repo[rev]
798 ctx = repo[rev]
808 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
799 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
809 files = [f for f in ctx if m(f)]
800 files = [f for f in ctx if m(f)]
810 if len(files) != 1:
801 if len(files) != 1:
811 raise error.ParseError(msg)
802 raise error.ParseError(msg)
812 return files[0]
803 return files[0]
813
804
814 def getorigvfs(ui, repo):
805 def getorigvfs(ui, repo):
815 """return a vfs suitable to save 'orig' file
806 """return a vfs suitable to save 'orig' file
816
807
817 return None if no special directory is configured"""
808 return None if no special directory is configured"""
818 origbackuppath = ui.config('ui', 'origbackuppath')
809 origbackuppath = ui.config('ui', 'origbackuppath')
819 if not origbackuppath:
810 if not origbackuppath:
820 return None
811 return None
821 return vfs.vfs(repo.wvfs.join(origbackuppath))
812 return vfs.vfs(repo.wvfs.join(origbackuppath))
822
813
823 def origpath(ui, repo, filepath):
814 def origpath(ui, repo, filepath):
824 '''customize where .orig files are created
815 '''customize where .orig files are created
825
816
826 Fetch user defined path from config file: [ui] origbackuppath = <path>
817 Fetch user defined path from config file: [ui] origbackuppath = <path>
827 Fall back to default (filepath with .orig suffix) if not specified
818 Fall back to default (filepath with .orig suffix) if not specified
828 '''
819 '''
829 origvfs = getorigvfs(ui, repo)
820 origvfs = getorigvfs(ui, repo)
830 if origvfs is None:
821 if origvfs is None:
831 return filepath + ".orig"
822 return filepath + ".orig"
832
823
833 # Convert filepath from an absolute path into a path inside the repo.
824 # Convert filepath from an absolute path into a path inside the repo.
834 filepathfromroot = util.normpath(os.path.relpath(filepath,
825 filepathfromroot = util.normpath(os.path.relpath(filepath,
835 start=repo.root))
826 start=repo.root))
836
827
837 origbackupdir = origvfs.dirname(filepathfromroot)
828 origbackupdir = origvfs.dirname(filepathfromroot)
838 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
829 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
839 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
830 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
840
831
841 # Remove any files that conflict with the backup file's path
832 # Remove any files that conflict with the backup file's path
842 for f in reversed(list(util.finddirs(filepathfromroot))):
833 for f in reversed(list(util.finddirs(filepathfromroot))):
843 if origvfs.isfileorlink(f):
834 if origvfs.isfileorlink(f):
844 ui.note(_('removing conflicting file: %s\n')
835 ui.note(_('removing conflicting file: %s\n')
845 % origvfs.join(f))
836 % origvfs.join(f))
846 origvfs.unlink(f)
837 origvfs.unlink(f)
847 break
838 break
848
839
849 origvfs.makedirs(origbackupdir)
840 origvfs.makedirs(origbackupdir)
850
841
851 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
842 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
852 ui.note(_('removing conflicting directory: %s\n')
843 ui.note(_('removing conflicting directory: %s\n')
853 % origvfs.join(filepathfromroot))
844 % origvfs.join(filepathfromroot))
854 origvfs.rmtree(filepathfromroot, forcibly=True)
845 origvfs.rmtree(filepathfromroot, forcibly=True)
855
846
856 return origvfs.join(filepathfromroot)
847 return origvfs.join(filepathfromroot)
857
848
858 class _containsnode(object):
849 class _containsnode(object):
859 """proxy __contains__(node) to container.__contains__ which accepts revs"""
850 """proxy __contains__(node) to container.__contains__ which accepts revs"""
860
851
861 def __init__(self, repo, revcontainer):
852 def __init__(self, repo, revcontainer):
862 self._torev = repo.changelog.rev
853 self._torev = repo.changelog.rev
863 self._revcontains = revcontainer.__contains__
854 self._revcontains = revcontainer.__contains__
864
855
865 def __contains__(self, node):
856 def __contains__(self, node):
866 return self._revcontains(self._torev(node))
857 return self._revcontains(self._torev(node))
867
858
868 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
859 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
869 fixphase=False, targetphase=None, backup=True):
860 fixphase=False, targetphase=None, backup=True):
870 """do common cleanups when old nodes are replaced by new nodes
861 """do common cleanups when old nodes are replaced by new nodes
871
862
872 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
863 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
873 (we might also want to move working directory parent in the future)
864 (we might also want to move working directory parent in the future)
874
865
875 By default, bookmark moves are calculated automatically from 'replacements',
866 By default, bookmark moves are calculated automatically from 'replacements',
876 but 'moves' can be used to override that. Also, 'moves' may include
867 but 'moves' can be used to override that. Also, 'moves' may include
877 additional bookmark moves that should not have associated obsmarkers.
868 additional bookmark moves that should not have associated obsmarkers.
878
869
879 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
870 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
880 have replacements. operation is a string, like "rebase".
871 have replacements. operation is a string, like "rebase".
881
872
882 metadata is dictionary containing metadata to be stored in obsmarker if
873 metadata is dictionary containing metadata to be stored in obsmarker if
883 obsolescence is enabled.
874 obsolescence is enabled.
884 """
875 """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non-tuple "source" keys to tuples for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with the biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned directly, because it
    was stored in the instance dictionary and the descriptor is bypassed.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict, e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

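    # Illustrative subclass (patterned on localrepo's 'storecache'; shown here
    # only as a sketch): subclasses provide join() and then decorate
    # properties, e.g.
    #
    #   class storecache(filecache):
    #       def join(self, obj, fname):
    #           return obj.sjoin(fname)
    #
    #   @storecache('00changelog.i')
    #   def changelog(self):
    #       return changelog.changelog(self.svfs)
    #
    # letting the owning object invalidate and recompute the property when
    # the tracked file changes on disk.
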
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

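    # Hypothetical configuration example (names invented for illustration):
    #
    #   [extdata]
    #   bugzilla = shell:cat .hg/bugdata
    #
    # where .hg/bugdata holds lines like "3de5eca88c00 bug 123" would make
    # extdatasource(repo, 'bugzilla') map each locally-known rev to its note;
    # the extdata() revset consumes this mapping.
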
    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # 'unit' needs a default so the format strings below don't hit an
        # unbound name when self.unit is empty
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

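# Minimal usage sketch (an assumed call site, not part of this module):
# progress is a context manager, typically obtained through ui.makeprogress(),
# e.g.
#
#   with ui.makeprogress(_('scanning'), unit=_('files'),
#                        total=len(files)) as p:
#       for i, f in enumerate(files):
#           p.update(i, item=f)
#
# so the bar is marked complete automatically when the block exits.
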
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as-is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

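# Illustrative round trip for simplekeyvaluefile (hypothetical path and data,
# not from the original source):
#
#   skv = simplekeyvaluefile(repo.vfs, 'myext-state')
#   skv.write({'version': '1', 'node': nodehex}, firstline='v1')
#   state = skv.read(firstlinenonkeyval=True)
#   # state == {'__firstline': 'v1', 'version': '1', 'node': nodehex}
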
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

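# Illustrative registration sketch (hypothetical extension code, not part of
# this module): an extension can make file contents available by adding a
# hook to fileprefetchhooks, e.g.
#
#   def _prefetch(repo, revs, match):
#       # fetch the data for files selected by 'match' in 'revs' from a
#       # remote store before the command reads them
#       ...
#   scmutil.fileprefetchhooks.add('myext', _prefetch)
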
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not
                # been exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

1704 def getinstabilitymessage(delta, instability):
1695 def getinstabilitymessage(delta, instability):
1705 """function to return the message to show warning about new instabilities
1696 """function to return the message to show warning about new instabilities
1706
1697
1707 exists as a separate function so that extension can wrap to show more
1698 exists as a separate function so that extension can wrap to show more
1708 information like how to fix instabilities"""
1699 information like how to fix instabilities"""
1709 if delta > 0:
1700 if delta > 0:
1710 return _('%i new %s changesets\n') % (delta, instability)
1701 return _('%i new %s changesets\n') % (delta, instability)
1711
1702
1712 def nodesummaries(repo, nodes, maxnumnodes=4):
1703 def nodesummaries(repo, nodes, maxnumnodes=4):
1713 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1704 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1714 return ' '.join(short(h) for h in nodes)
1705 return ' '.join(short(h) for h in nodes)
1715 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1706 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1716 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1707 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1717
1708

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
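
# Illustration (added for exposition; not upstream code): how the abort
# message and hint above are assembled. The branch name, head count, and node
# summaries are invented; in real use the hint embeds nodesummaries() output.
def _enforcesinglehead_example():
    name, nheads = 'stable', 2
    msg = 'rejecting multiple heads on branch "%s"' % name
    hint = '%d heads: %s' % (nheads, 'aaaaaaaaaaaa bbbbbbbbbbbb')
    return msg, hint  # what error.Abort would show to the user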

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally
    loaded.
    """
    return sink
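
# Illustration (added for exposition; not upstream code): an extension would
# typically hook this via extensions.wrapfunction. The wrapper receives the
# original function as its first argument; 'auditingsink' is hypothetical.
#
#     def mysink(orig, sink):
#         return auditingsink(orig(sink))
#     extensions.wrapfunction(scmutil, 'wrapconvertsink', mysink)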

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision
    number is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned
    return repo.filtered('visible-hidden', revs)
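
# Sketch of intended use (added for exposition; the call below is
# hypothetical): a command that writes history could pin hidden changesets
# named directly by the user before resolving its revs, e.g.:
#
#     repo = unhidehashlikerevs(repo, [b'c90a05b + .^'], 'warn')
#
# Only hash-like symbols ('c90a05b') can pin hidden changesets; symbols like
# '.^' contain nothing hash-like and are ignored, and the whole call is a
# no-op unless experimental.directaccess is enabled.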

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                        continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
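
# Runnable sketch (added for exposition; not upstream code): which symbols the
# integer branch above treats as revision numbers versus candidate hash
# prefixes. Note that an all-digit string larger than tiprev (like '123456')
# still falls through to hash-prefix resolution, since digits are valid hex.
def _symbolkind_example(symbols, tiprev=100):
    kinds = {}
    for s in symbols:
        try:
            if int(s) <= tiprev:
                kinds[s] = 'revnum'
            else:
                kinds[s] = 'maybe hash prefix'
        except ValueError:
            kinds[s] = 'maybe hash prefix'
    return kinds
# _symbolkind_example(['42', '123456', 'c90a05b'])
#   -> {'42': 'revnum', '123456': 'maybe hash prefix',
#       'c90a05b': 'maybe hash prefix'}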

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
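
# Illustration (added for exposition; not upstream code): repo.revs() expands
# the %s placeholders itself, so for mark='foo' the revset evaluated above is
# roughly:
#
#     ancestors(bookmark('foo'))
#       - ancestors(head() and not bookmark('foo'))
#       - ancestors(bookmark() and not bookmark('foo'))
#
# i.e. everything leading up to the bookmark, minus history already reachable
# from non-bookmarked heads or from other bookmarks -- the changesets that
# belong to this bookmark's own line of work.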