dispatch: add inline comment about possible IOError subtypes...
Yuya Nishihara
r41464:b5169e79 default
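The inline comments added below document which IOError subtypes each branch of the IOError handler in callcatch() is expected to see. As a minimal sketch of why the branches test attributes rather than exception classes, assuming the Python 2 standard library this code targets (the sketch is illustrative only and not part of the change):

    import errno
    import socket
    import ssl
    import urllib2

    # urllib2.HTTPError carries a "code" attribute (the HTTP status); it
    # subclasses urllib2.URLError, which carries "reason", and both
    # descend from IOError, so they land in this handler.
    assert issubclass(urllib2.HTTPError, urllib2.URLError)
    assert issubclass(urllib2.URLError, IOError)

    # ssl.SSLError reaches IOError through socket.error (Python 2.6+);
    # on Python 2.7.9+ it also exposes a "reason" attribute, so it takes
    # the same branch as URLError.
    assert issubclass(ssl.SSLError, socket.error)
    assert issubclass(socket.error, IOError)

    # A plain filesystem IOError exposes "strerror" (and usually
    # "filename"), which is the "common IOError" branch.
    try:
        open('/nonexistent-path')
    except IOError as inst:
        assert inst.errno == errno.ENOENT
        assert inst.strerror

An IOError carrying none of these attributes is unexpected, which is why the final branch re-raises it as "suspicious".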
@@ -1,1815 +1,1815 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
-        if util.safehasattr(inst, "code"):
+        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
-        elif util.safehasattr(inst, "reason"):
+        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
-        elif getattr(inst, "strerror", None):
+        elif getattr(inst, "strerror", None): # common IOError
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
-        else:
+        else: # suspicious IOError
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code

    return -1

def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
            if len(matches) == 1:
                return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

850 class _containsnode(object):
850 class _containsnode(object):
851 """proxy __contains__(node) to container.__contains__ which accepts revs"""
851 """proxy __contains__(node) to container.__contains__ which accepts revs"""
852
852
853 def __init__(self, repo, revcontainer):
853 def __init__(self, repo, revcontainer):
854 self._torev = repo.changelog.rev
854 self._torev = repo.changelog.rev
855 self._revcontains = revcontainer.__contains__
855 self._revcontains = revcontainer.__contains__
856
856
857 def __contains__(self, node):
857 def __contains__(self, node):
858 return self._revcontains(self._torev(node))
858 return self._revcontains(self._torev(node))
859
859
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non-tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with the biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

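# Illustrative sketch (hypothetical nodes, not part of the original module):
# a history-rewriting command that replaced 'oldnode' with 'newnode' would
# typically finish with something like:
#
#   cleanupnodes(repo, {oldnode: [newnode]}, 'amend', fixphase=True)
#
# which moves any bookmarks from oldnode to newnode and then either writes an
# obsolescence marker or strips oldnode, depending on the repo configuration.
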
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # 'rejected' is only referenced when the badfn callback fires during the
    # dirstate walk below, by which point the list has been bound
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

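# Note (illustrative, not part of the original module): the one-letter states
# above come from the dirstate map ('?' unknown, 'a' added, 'r' removed) and
# 'st' is the stat result from the walk, so a tracked file that is missing on
# disk (dstate != 'r' and not st) is reported as deleted. A caller unpacks
# the result as:
#
#   added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
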
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

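# Illustrative sketch (hypothetical subclass, not part of the original
# module): a repository-local variant of the filecache decorator above would
# resolve paths through the repo vfs and decorate a property that must be
# recomputed whenever the backing file changes:
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class somerepo(object):
#       @repofilecache('bookmarks')
#       def _bookmarks(self):
#           return readbookmarksfile(self)   # hypothetical reader
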
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

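# Illustrative sketch (hypothetical source name, not part of the original
# module): with an hgrc section such as
#
#   [extdata]
#   bugzilla = shell:cat .hg/bugdata
#
# a call like extdatasource(repo, 'bugzilla') would run the command from the
# repo root and map each "<revspec> <value>" output line to {rev: value},
# silently skipping revisions unknown to the local repository.
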
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

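# Illustrative sketch (hypothetical command, not part of the original
# module): an extension holding the wlock could hand it to a subprocess,
# which then reads HG_WLOCK_LOCKER from its environment to prove ownership:
#
#   with repo.wlock():
#       rc = wlocksub(repo, b'my-helper-command')   # hypothetical command
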
class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # default to no suffix so 'unit' is bound even when self.unit is empty
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

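# Illustrative sketch (not part of the original module): callers usually
# obtain a progress helper via ui.makeprogress() and drive it as a context
# manager so the bar is completed even on error:
#
#   with ui.makeprogress(_('files'), unit=_('files'), total=len(files)) as p:
#       for f in files:
#           p.increment(item=f)
#           process(f)              # hypothetical per-file work
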
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumeric and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumeric and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

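# Illustrative sketch (hypothetical file name, not part of the original
# module): state files such as shelve's use this round-trip:
#
#   f = simplekeyvaluefile(repo.vfs, 'mystate')     # hypothetical path
#   f.write({'version': '1', 'name': 'value'}, firstline='fmt1')
#   d = f.read(firstlinenonkeyval=True)
#   # d == {'__firstline': 'fmt1', 'version': '1', 'name': 'value'}
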
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

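# Illustrative sketch (hypothetical extension code, not part of the original
# module): an extension that stores file contents remotely would register a
# hook so commands can warm the local cache before reading file data:
#
#   def _prefetch(repo, revs, match):
#       ...  # download the matched files for the given revs
#
#   fileprefetchhooks.add('myextension', _prefetch)
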
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

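# Illustrative sketch (not part of the original module): the transaction
# opener wires this up, roughly:
#
#   tr = repo.transaction('pull')
#   registersummarycallback(repo, tr, txnname='pull')
#
# so that messages such as "new changesets ..." and "obsoleted N changesets"
# are printed once the transaction closes.
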
def getinstabilitymessage(delta, instability):
    """function to return the message to show as a warning about new
    instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

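# Illustrative sketch: a third-party extension could intercept every convert
# sink by wrapping this no-op hook. The wrapper below is hypothetical:
#
#   from mercurial import extensions, scmutil
#
#   def _wrapsink(orig, sink):
#       sink = orig(sink)
#       # inspect, decorate or replace the sink here
#       return sink
#
#   def extsetup(ui):
#       extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)
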
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

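# Illustrative sketch: with the experimental.directaccess knob read above
# enabled, a command naming a hidden changeset by hash gets back a repo view
# with that changeset pinned, e.g. (hash invented):
#
#   $ hg --config experimental.directaccess=yes log -r ff00ff00ff00
#
# Callers passing hiddentype='warn' additionally emit the warning built
# above before the pinned view is returned.
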
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

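# Illustrative walk-through: for symbols like {'10', 'ff00ff00ff00'}, the
# numeric entry is honoured only when experimental.directaccess.revnums is
# set, while the hash-like entry goes through resolvehexnodeidprefix(); in
# both cases a revision is collected only if it is absent from the filtered
# changelog, i.e. currently hidden.
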
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
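
# Illustrative reading of the revset above: start from all ancestors of the
# given bookmark, then subtract ancestors of heads not carrying it and
# ancestors of the other bookmarks; what remains are the changesets "owned"
# by the bookmark, e.g. the stretch between a branch point and a 'feature'
# bookmark on an otherwise shared history.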