##// END OF EJS Templates
scmutil: drop unreachable except clause...
Martin von Zweigbergk -
r41324:17941fc5 default
parent child Browse files
Show More
@@ -1,1826 +1,1823 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
16 import subprocess
15 import subprocess
17 import weakref
16 import weakref
18
17
19 from .i18n import _
18 from .i18n import _
20 from .node import (
19 from .node import (
21 bin,
20 bin,
22 hex,
21 hex,
23 nullid,
22 nullid,
24 nullrev,
23 nullrev,
25 short,
24 short,
26 wdirid,
25 wdirid,
27 wdirrev,
26 wdirrev,
28 )
27 )
29
28
30 from . import (
29 from . import (
31 encoding,
30 encoding,
32 error,
31 error,
33 match as matchmod,
32 match as matchmod,
34 obsolete,
33 obsolete,
35 obsutil,
34 obsutil,
36 pathutil,
35 pathutil,
37 phases,
36 phases,
38 policy,
37 policy,
39 pycompat,
38 pycompat,
40 revsetlang,
39 revsetlang,
41 similar,
40 similar,
42 smartset,
41 smartset,
43 url,
42 url,
44 util,
43 util,
45 vfs,
44 vfs,
46 )
45 )
47
46
48 from .utils import (
47 from .utils import (
49 procutil,
48 procutil,
50 stringutil,
49 stringutil,
51 )
50 )
52
51
53 if pycompat.iswindows:
52 if pycompat.iswindows:
54 from . import scmwindows as scmplatform
53 from . import scmwindows as scmplatform
55 else:
54 else:
56 from . import scmposix as scmplatform
55 from . import scmposix as scmplatform
57
56
58 parsers = policy.importmod(r'parsers')
57 parsers = policy.importmod(r'parsers')
59
58
60 termsize = scmplatform.termsize
59 termsize = scmplatform.termsize
61
60
62 class status(tuple):
61 class status(tuple):
63 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
62 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
64 and 'ignored' properties are only relevant to the working copy.
63 and 'ignored' properties are only relevant to the working copy.
65 '''
64 '''
66
65
67 __slots__ = ()
66 __slots__ = ()
68
67
69 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
68 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
70 clean):
69 clean):
71 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
70 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
72 ignored, clean))
71 ignored, clean))
73
72
74 @property
73 @property
75 def modified(self):
74 def modified(self):
76 '''files that have been modified'''
75 '''files that have been modified'''
77 return self[0]
76 return self[0]
78
77
79 @property
78 @property
80 def added(self):
79 def added(self):
81 '''files that have been added'''
80 '''files that have been added'''
82 return self[1]
81 return self[1]
83
82
84 @property
83 @property
85 def removed(self):
84 def removed(self):
86 '''files that have been removed'''
85 '''files that have been removed'''
87 return self[2]
86 return self[2]
88
87
89 @property
88 @property
90 def deleted(self):
89 def deleted(self):
91 '''files that are in the dirstate, but have been deleted from the
90 '''files that are in the dirstate, but have been deleted from the
92 working copy (aka "missing")
91 working copy (aka "missing")
93 '''
92 '''
94 return self[3]
93 return self[3]
95
94
96 @property
95 @property
97 def unknown(self):
96 def unknown(self):
98 '''files not in the dirstate that are not ignored'''
97 '''files not in the dirstate that are not ignored'''
99 return self[4]
98 return self[4]
100
99
101 @property
100 @property
102 def ignored(self):
101 def ignored(self):
103 '''files not in the dirstate that are ignored (by _dirignore())'''
102 '''files not in the dirstate that are ignored (by _dirignore())'''
104 return self[5]
103 return self[5]
105
104
106 @property
105 @property
107 def clean(self):
106 def clean(self):
108 '''files that have not been modified'''
107 '''files that have not been modified'''
109 return self[6]
108 return self[6]
110
109
111 def __repr__(self, *args, **kwargs):
110 def __repr__(self, *args, **kwargs):
112 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
111 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
113 r'unknown=%s, ignored=%s, clean=%s>') %
112 r'unknown=%s, ignored=%s, clean=%s>') %
114 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
113 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115
114
116 def itersubrepos(ctx1, ctx2):
115 def itersubrepos(ctx1, ctx2):
117 """find subrepos in ctx1 or ctx2"""
116 """find subrepos in ctx1 or ctx2"""
118 # Create a (subpath, ctx) mapping where we prefer subpaths from
117 # Create a (subpath, ctx) mapping where we prefer subpaths from
119 # ctx1. The subpaths from ctx2 are important when the .hgsub file
118 # ctx1. The subpaths from ctx2 are important when the .hgsub file
120 # has been modified (in ctx2) but not yet committed (in ctx1).
119 # has been modified (in ctx2) but not yet committed (in ctx1).
121 subpaths = dict.fromkeys(ctx2.substate, ctx2)
120 subpaths = dict.fromkeys(ctx2.substate, ctx2)
122 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
121 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
123
122
124 missing = set()
123 missing = set()
125
124
126 for subpath in ctx2.substate:
125 for subpath in ctx2.substate:
127 if subpath not in ctx1.substate:
126 if subpath not in ctx1.substate:
128 del subpaths[subpath]
127 del subpaths[subpath]
129 missing.add(subpath)
128 missing.add(subpath)
130
129
131 for subpath, ctx in sorted(subpaths.iteritems()):
130 for subpath, ctx in sorted(subpaths.iteritems()):
132 yield subpath, ctx.sub(subpath)
131 yield subpath, ctx.sub(subpath)
133
132
134 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
133 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
135 # status and diff will have an accurate result when it does
134 # status and diff will have an accurate result when it does
136 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
135 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
137 # against itself.
136 # against itself.
138 for subpath in missing:
137 for subpath in missing:
139 yield subpath, ctx2.nullsub(subpath, ctx1)
138 yield subpath, ctx2.nullsub(subpath, ctx1)
140
139
141 def nochangesfound(ui, repo, excluded=None):
140 def nochangesfound(ui, repo, excluded=None):
142 '''Report no changes for push/pull, excluded is None or a list of
141 '''Report no changes for push/pull, excluded is None or a list of
143 nodes excluded from the push/pull.
142 nodes excluded from the push/pull.
144 '''
143 '''
145 secretlist = []
144 secretlist = []
146 if excluded:
145 if excluded:
147 for n in excluded:
146 for n in excluded:
148 ctx = repo[n]
147 ctx = repo[n]
149 if ctx.phase() >= phases.secret and not ctx.extinct():
148 if ctx.phase() >= phases.secret and not ctx.extinct():
150 secretlist.append(n)
149 secretlist.append(n)
151
150
152 if secretlist:
151 if secretlist:
153 ui.status(_("no changes found (ignored %d secret changesets)\n")
152 ui.status(_("no changes found (ignored %d secret changesets)\n")
154 % len(secretlist))
153 % len(secretlist))
155 else:
154 else:
156 ui.status(_("no changes found\n"))
155 ui.status(_("no changes found\n"))
157
156
158 def callcatch(ui, func):
157 def callcatch(ui, func):
159 """call func() with global exception handling
158 """call func() with global exception handling
160
159
161 return func() if no exception happens. otherwise do some error handling
160 return func() if no exception happens. otherwise do some error handling
162 and return an exit code accordingly. does not handle all exceptions.
161 and return an exit code accordingly. does not handle all exceptions.
163 """
162 """
164 try:
163 try:
165 try:
164 try:
166 return func()
165 return func()
167 except: # re-raises
166 except: # re-raises
168 ui.traceback()
167 ui.traceback()
169 raise
168 raise
170 # Global exception handling, alphabetically
169 # Global exception handling, alphabetically
171 # Mercurial-specific first, followed by built-in and library exceptions
170 # Mercurial-specific first, followed by built-in and library exceptions
172 except error.LockHeld as inst:
171 except error.LockHeld as inst:
173 if inst.errno == errno.ETIMEDOUT:
172 if inst.errno == errno.ETIMEDOUT:
174 reason = _('timed out waiting for lock held by %r') % (
173 reason = _('timed out waiting for lock held by %r') % (
175 pycompat.bytestr(inst.locker))
174 pycompat.bytestr(inst.locker))
176 else:
175 else:
177 reason = _('lock held by %r') % inst.locker
176 reason = _('lock held by %r') % inst.locker
178 ui.error(_("abort: %s: %s\n") % (
177 ui.error(_("abort: %s: %s\n") % (
179 inst.desc or stringutil.forcebytestr(inst.filename), reason))
178 inst.desc or stringutil.forcebytestr(inst.filename), reason))
180 if not inst.locker:
179 if not inst.locker:
181 ui.error(_("(lock might be very busy)\n"))
180 ui.error(_("(lock might be very busy)\n"))
182 except error.LockUnavailable as inst:
181 except error.LockUnavailable as inst:
183 ui.error(_("abort: could not lock %s: %s\n") %
182 ui.error(_("abort: could not lock %s: %s\n") %
184 (inst.desc or stringutil.forcebytestr(inst.filename),
183 (inst.desc or stringutil.forcebytestr(inst.filename),
185 encoding.strtolocal(inst.strerror)))
184 encoding.strtolocal(inst.strerror)))
186 except error.OutOfBandError as inst:
185 except error.OutOfBandError as inst:
187 if inst.args:
186 if inst.args:
188 msg = _("abort: remote error:\n")
187 msg = _("abort: remote error:\n")
189 else:
188 else:
190 msg = _("abort: remote error\n")
189 msg = _("abort: remote error\n")
191 ui.error(msg)
190 ui.error(msg)
192 if inst.args:
191 if inst.args:
193 ui.error(''.join(inst.args))
192 ui.error(''.join(inst.args))
194 if inst.hint:
193 if inst.hint:
195 ui.error('(%s)\n' % inst.hint)
194 ui.error('(%s)\n' % inst.hint)
196 except error.RepoError as inst:
195 except error.RepoError as inst:
197 ui.error(_("abort: %s!\n") % inst)
196 ui.error(_("abort: %s!\n") % inst)
198 if inst.hint:
197 if inst.hint:
199 ui.error(_("(%s)\n") % inst.hint)
198 ui.error(_("(%s)\n") % inst.hint)
200 except error.ResponseError as inst:
199 except error.ResponseError as inst:
201 ui.error(_("abort: %s") % inst.args[0])
200 ui.error(_("abort: %s") % inst.args[0])
202 msg = inst.args[1]
201 msg = inst.args[1]
203 if isinstance(msg, type(u'')):
202 if isinstance(msg, type(u'')):
204 msg = pycompat.sysbytes(msg)
203 msg = pycompat.sysbytes(msg)
205 if not isinstance(msg, bytes):
204 if not isinstance(msg, bytes):
206 ui.error(" %r\n" % (msg,))
205 ui.error(" %r\n" % (msg,))
207 elif not msg:
206 elif not msg:
208 ui.error(_(" empty string\n"))
207 ui.error(_(" empty string\n"))
209 else:
208 else:
210 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
209 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
211 except error.CensoredNodeError as inst:
210 except error.CensoredNodeError as inst:
212 ui.error(_("abort: file censored %s!\n") % inst)
211 ui.error(_("abort: file censored %s!\n") % inst)
213 except error.StorageError as inst:
212 except error.StorageError as inst:
214 ui.error(_("abort: %s!\n") % inst)
213 ui.error(_("abort: %s!\n") % inst)
215 if inst.hint:
214 if inst.hint:
216 ui.error(_("(%s)\n") % inst.hint)
215 ui.error(_("(%s)\n") % inst.hint)
217 except error.InterventionRequired as inst:
216 except error.InterventionRequired as inst:
218 ui.error("%s\n" % inst)
217 ui.error("%s\n" % inst)
219 if inst.hint:
218 if inst.hint:
220 ui.error(_("(%s)\n") % inst.hint)
219 ui.error(_("(%s)\n") % inst.hint)
221 return 1
220 return 1
222 except error.WdirUnsupported:
221 except error.WdirUnsupported:
223 ui.error(_("abort: working directory revision cannot be specified\n"))
222 ui.error(_("abort: working directory revision cannot be specified\n"))
224 except error.Abort as inst:
223 except error.Abort as inst:
225 ui.error(_("abort: %s\n") % inst)
224 ui.error(_("abort: %s\n") % inst)
226 if inst.hint:
225 if inst.hint:
227 ui.error(_("(%s)\n") % inst.hint)
226 ui.error(_("(%s)\n") % inst.hint)
228 except ImportError as inst:
227 except ImportError as inst:
229 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
228 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
230 m = stringutil.forcebytestr(inst).split()[-1]
229 m = stringutil.forcebytestr(inst).split()[-1]
231 if m in "mpatch bdiff".split():
230 if m in "mpatch bdiff".split():
232 ui.error(_("(did you forget to compile extensions?)\n"))
231 ui.error(_("(did you forget to compile extensions?)\n"))
233 elif m in "zlib".split():
232 elif m in "zlib".split():
234 ui.error(_("(is your Python install correct?)\n"))
233 ui.error(_("(is your Python install correct?)\n"))
235 except IOError as inst:
234 except IOError as inst:
236 if util.safehasattr(inst, "code"):
235 if util.safehasattr(inst, "code"):
237 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
236 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
238 elif util.safehasattr(inst, "reason"):
237 elif util.safehasattr(inst, "reason"):
239 try: # usually it is in the form (errno, strerror)
238 try: # usually it is in the form (errno, strerror)
240 reason = inst.reason.args[1]
239 reason = inst.reason.args[1]
241 except (AttributeError, IndexError):
240 except (AttributeError, IndexError):
242 # it might be anything, for example a string
241 # it might be anything, for example a string
243 reason = inst.reason
242 reason = inst.reason
244 if isinstance(reason, pycompat.unicode):
243 if isinstance(reason, pycompat.unicode):
245 # SSLError of Python 2.7.9 contains a unicode
244 # SSLError of Python 2.7.9 contains a unicode
246 reason = encoding.unitolocal(reason)
245 reason = encoding.unitolocal(reason)
247 ui.error(_("abort: error: %s\n") % reason)
246 ui.error(_("abort: error: %s\n") % reason)
248 elif (util.safehasattr(inst, "args")
247 elif (util.safehasattr(inst, "args")
249 and inst.args and inst.args[0] == errno.EPIPE):
248 and inst.args and inst.args[0] == errno.EPIPE):
250 pass
249 pass
251 elif getattr(inst, "strerror", None):
250 elif getattr(inst, "strerror", None):
252 if getattr(inst, "filename", None):
251 if getattr(inst, "filename", None):
253 ui.error(_("abort: %s: %s\n") % (
252 ui.error(_("abort: %s: %s\n") % (
254 encoding.strtolocal(inst.strerror),
253 encoding.strtolocal(inst.strerror),
255 stringutil.forcebytestr(inst.filename)))
254 stringutil.forcebytestr(inst.filename)))
256 else:
255 else:
257 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
256 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
258 else:
257 else:
259 raise
258 raise
260 except OSError as inst:
259 except OSError as inst:
261 if getattr(inst, "filename", None) is not None:
260 if getattr(inst, "filename", None) is not None:
262 ui.error(_("abort: %s: '%s'\n") % (
261 ui.error(_("abort: %s: '%s'\n") % (
263 encoding.strtolocal(inst.strerror),
262 encoding.strtolocal(inst.strerror),
264 stringutil.forcebytestr(inst.filename)))
263 stringutil.forcebytestr(inst.filename)))
265 else:
264 else:
266 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
265 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
267 except MemoryError:
266 except MemoryError:
268 ui.error(_("abort: out of memory\n"))
267 ui.error(_("abort: out of memory\n"))
269 except SystemExit as inst:
268 except SystemExit as inst:
270 # Commands shouldn't sys.exit directly, but give a return code.
269 # Commands shouldn't sys.exit directly, but give a return code.
271 # Just in case catch this and and pass exit code to caller.
270 # Just in case catch this and and pass exit code to caller.
272 return inst.code
271 return inst.code
273 except socket.error as inst:
274 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
275
272
276 return -1
273 return -1
277
274
278 def checknewlabel(repo, lbl, kind):
275 def checknewlabel(repo, lbl, kind):
279 # Do not use the "kind" parameter in ui output.
276 # Do not use the "kind" parameter in ui output.
280 # It makes strings difficult to translate.
277 # It makes strings difficult to translate.
281 if lbl in ['tip', '.', 'null']:
278 if lbl in ['tip', '.', 'null']:
282 raise error.Abort(_("the name '%s' is reserved") % lbl)
279 raise error.Abort(_("the name '%s' is reserved") % lbl)
283 for c in (':', '\0', '\n', '\r'):
280 for c in (':', '\0', '\n', '\r'):
284 if c in lbl:
281 if c in lbl:
285 raise error.Abort(
282 raise error.Abort(
286 _("%r cannot be used in a name") % pycompat.bytestr(c))
283 _("%r cannot be used in a name") % pycompat.bytestr(c))
287 try:
284 try:
288 int(lbl)
285 int(lbl)
289 raise error.Abort(_("cannot use an integer as a name"))
286 raise error.Abort(_("cannot use an integer as a name"))
290 except ValueError:
287 except ValueError:
291 pass
288 pass
292 if lbl.strip() != lbl:
289 if lbl.strip() != lbl:
293 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
290 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
294
291
295 def checkfilename(f):
292 def checkfilename(f):
296 '''Check that the filename f is an acceptable filename for a tracked file'''
293 '''Check that the filename f is an acceptable filename for a tracked file'''
297 if '\r' in f or '\n' in f:
294 if '\r' in f or '\n' in f:
298 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
295 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
299 % pycompat.bytestr(f))
296 % pycompat.bytestr(f))
300
297
301 def checkportable(ui, f):
298 def checkportable(ui, f):
302 '''Check if filename f is portable and warn or abort depending on config'''
299 '''Check if filename f is portable and warn or abort depending on config'''
303 checkfilename(f)
300 checkfilename(f)
304 abort, warn = checkportabilityalert(ui)
301 abort, warn = checkportabilityalert(ui)
305 if abort or warn:
302 if abort or warn:
306 msg = util.checkwinfilename(f)
303 msg = util.checkwinfilename(f)
307 if msg:
304 if msg:
308 msg = "%s: %s" % (msg, procutil.shellquote(f))
305 msg = "%s: %s" % (msg, procutil.shellquote(f))
309 if abort:
306 if abort:
310 raise error.Abort(msg)
307 raise error.Abort(msg)
311 ui.warn(_("warning: %s\n") % msg)
308 ui.warn(_("warning: %s\n") % msg)
312
309
313 def checkportabilityalert(ui):
310 def checkportabilityalert(ui):
314 '''check if the user's config requests nothing, a warning, or abort for
311 '''check if the user's config requests nothing, a warning, or abort for
315 non-portable filenames'''
312 non-portable filenames'''
316 val = ui.config('ui', 'portablefilenames')
313 val = ui.config('ui', 'portablefilenames')
317 lval = val.lower()
314 lval = val.lower()
318 bval = stringutil.parsebool(val)
315 bval = stringutil.parsebool(val)
319 abort = pycompat.iswindows or lval == 'abort'
316 abort = pycompat.iswindows or lval == 'abort'
320 warn = bval or lval == 'warn'
317 warn = bval or lval == 'warn'
321 if bval is None and not (warn or abort or lval == 'ignore'):
318 if bval is None and not (warn or abort or lval == 'ignore'):
322 raise error.ConfigError(
319 raise error.ConfigError(
323 _("ui.portablefilenames value is invalid ('%s')") % val)
320 _("ui.portablefilenames value is invalid ('%s')") % val)
324 return abort, warn
321 return abort, warn
325
322
326 class casecollisionauditor(object):
323 class casecollisionauditor(object):
327 def __init__(self, ui, abort, dirstate):
324 def __init__(self, ui, abort, dirstate):
328 self._ui = ui
325 self._ui = ui
329 self._abort = abort
326 self._abort = abort
330 allfiles = '\0'.join(dirstate._map)
327 allfiles = '\0'.join(dirstate._map)
331 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
328 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
332 self._dirstate = dirstate
329 self._dirstate = dirstate
333 # The purpose of _newfiles is so that we don't complain about
330 # The purpose of _newfiles is so that we don't complain about
334 # case collisions if someone were to call this object with the
331 # case collisions if someone were to call this object with the
335 # same filename twice.
332 # same filename twice.
336 self._newfiles = set()
333 self._newfiles = set()
337
334
338 def __call__(self, f):
335 def __call__(self, f):
339 if f in self._newfiles:
336 if f in self._newfiles:
340 return
337 return
341 fl = encoding.lower(f)
338 fl = encoding.lower(f)
342 if fl in self._loweredfiles and f not in self._dirstate:
339 if fl in self._loweredfiles and f not in self._dirstate:
343 msg = _('possible case-folding collision for %s') % f
340 msg = _('possible case-folding collision for %s') % f
344 if self._abort:
341 if self._abort:
345 raise error.Abort(msg)
342 raise error.Abort(msg)
346 self._ui.warn(_("warning: %s\n") % msg)
343 self._ui.warn(_("warning: %s\n") % msg)
347 self._loweredfiles.add(fl)
344 self._loweredfiles.add(fl)
348 self._newfiles.add(f)
345 self._newfiles.add(f)
349
346
350 def filteredhash(repo, maxrev):
347 def filteredhash(repo, maxrev):
351 """build hash of filtered revisions in the current repoview.
348 """build hash of filtered revisions in the current repoview.
352
349
353 Multiple caches perform up-to-date validation by checking that the
350 Multiple caches perform up-to-date validation by checking that the
354 tiprev and tipnode stored in the cache file match the current repository.
351 tiprev and tipnode stored in the cache file match the current repository.
355 However, this is not sufficient for validating repoviews because the set
352 However, this is not sufficient for validating repoviews because the set
356 of revisions in the view may change without the repository tiprev and
353 of revisions in the view may change without the repository tiprev and
357 tipnode changing.
354 tipnode changing.
358
355
359 This function hashes all the revs filtered from the view and returns
356 This function hashes all the revs filtered from the view and returns
360 that SHA-1 digest.
357 that SHA-1 digest.
361 """
358 """
362 cl = repo.changelog
359 cl = repo.changelog
363 if not cl.filteredrevs:
360 if not cl.filteredrevs:
364 return None
361 return None
365 key = None
362 key = None
366 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
363 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
367 if revs:
364 if revs:
368 s = hashlib.sha1()
365 s = hashlib.sha1()
369 for rev in revs:
366 for rev in revs:
370 s.update('%d;' % rev)
367 s.update('%d;' % rev)
371 key = s.digest()
368 key = s.digest()
372 return key
369 return key
373
370
374 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
371 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
375 '''yield every hg repository under path, always recursively.
372 '''yield every hg repository under path, always recursively.
376 The recurse flag will only control recursion into repo working dirs'''
373 The recurse flag will only control recursion into repo working dirs'''
377 def errhandler(err):
374 def errhandler(err):
378 if err.filename == path:
375 if err.filename == path:
379 raise err
376 raise err
380 samestat = getattr(os.path, 'samestat', None)
377 samestat = getattr(os.path, 'samestat', None)
381 if followsym and samestat is not None:
378 if followsym and samestat is not None:
382 def adddir(dirlst, dirname):
379 def adddir(dirlst, dirname):
383 dirstat = os.stat(dirname)
380 dirstat = os.stat(dirname)
384 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
381 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
385 if not match:
382 if not match:
386 dirlst.append(dirstat)
383 dirlst.append(dirstat)
387 return not match
384 return not match
388 else:
385 else:
389 followsym = False
386 followsym = False
390
387
391 if (seen_dirs is None) and followsym:
388 if (seen_dirs is None) and followsym:
392 seen_dirs = []
389 seen_dirs = []
393 adddir(seen_dirs, path)
390 adddir(seen_dirs, path)
394 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
391 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
395 dirs.sort()
392 dirs.sort()
396 if '.hg' in dirs:
393 if '.hg' in dirs:
397 yield root # found a repository
394 yield root # found a repository
398 qroot = os.path.join(root, '.hg', 'patches')
395 qroot = os.path.join(root, '.hg', 'patches')
399 if os.path.isdir(os.path.join(qroot, '.hg')):
396 if os.path.isdir(os.path.join(qroot, '.hg')):
400 yield qroot # we have a patch queue repo here
397 yield qroot # we have a patch queue repo here
401 if recurse:
398 if recurse:
402 # avoid recursing inside the .hg directory
399 # avoid recursing inside the .hg directory
403 dirs.remove('.hg')
400 dirs.remove('.hg')
404 else:
401 else:
405 dirs[:] = [] # don't descend further
402 dirs[:] = [] # don't descend further
406 elif followsym:
403 elif followsym:
407 newdirs = []
404 newdirs = []
408 for d in dirs:
405 for d in dirs:
409 fname = os.path.join(root, d)
406 fname = os.path.join(root, d)
410 if adddir(seen_dirs, fname):
407 if adddir(seen_dirs, fname):
411 if os.path.islink(fname):
408 if os.path.islink(fname):
412 for hgname in walkrepos(fname, True, seen_dirs):
409 for hgname in walkrepos(fname, True, seen_dirs):
413 yield hgname
410 yield hgname
414 else:
411 else:
415 newdirs.append(d)
412 newdirs.append(d)
416 dirs[:] = newdirs
413 dirs[:] = newdirs
417
414
418 def binnode(ctx):
415 def binnode(ctx):
419 """Return binary node id for a given basectx"""
416 """Return binary node id for a given basectx"""
420 node = ctx.node()
417 node = ctx.node()
421 if node is None:
418 if node is None:
422 return wdirid
419 return wdirid
423 return node
420 return node
424
421
425 def intrev(ctx):
422 def intrev(ctx):
426 """Return integer for a given basectx that can be used in comparison or
423 """Return integer for a given basectx that can be used in comparison or
427 arithmetic operation"""
424 arithmetic operation"""
428 rev = ctx.rev()
425 rev = ctx.rev()
429 if rev is None:
426 if rev is None:
430 return wdirrev
427 return wdirrev
431 return rev
428 return rev
432
429
433 def formatchangeid(ctx):
430 def formatchangeid(ctx):
434 """Format changectx as '{rev}:{node|formatnode}', which is the default
431 """Format changectx as '{rev}:{node|formatnode}', which is the default
435 template provided by logcmdutil.changesettemplater"""
432 template provided by logcmdutil.changesettemplater"""
436 repo = ctx.repo()
433 repo = ctx.repo()
437 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
434 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
438
435
439 def formatrevnode(ui, rev, node):
436 def formatrevnode(ui, rev, node):
440 """Format given revision and node depending on the current verbosity"""
437 """Format given revision and node depending on the current verbosity"""
441 if ui.debugflag:
438 if ui.debugflag:
442 hexfunc = hex
439 hexfunc = hex
443 else:
440 else:
444 hexfunc = short
441 hexfunc = short
445 return '%d:%s' % (rev, hexfunc(node))
442 return '%d:%s' % (rev, hexfunc(node))
446
443
447 def resolvehexnodeidprefix(repo, prefix):
444 def resolvehexnodeidprefix(repo, prefix):
448 if (prefix.startswith('x') and
445 if (prefix.startswith('x') and
449 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
446 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
450 prefix = prefix[1:]
447 prefix = prefix[1:]
451 try:
448 try:
452 # Uses unfiltered repo because it's faster when prefix is ambiguous/
449 # Uses unfiltered repo because it's faster when prefix is ambiguous/
453 # This matches the shortesthexnodeidprefix() function below.
450 # This matches the shortesthexnodeidprefix() function below.
454 node = repo.unfiltered().changelog._partialmatch(prefix)
451 node = repo.unfiltered().changelog._partialmatch(prefix)
455 except error.AmbiguousPrefixLookupError:
452 except error.AmbiguousPrefixLookupError:
456 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
453 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
457 if revset:
454 if revset:
458 # Clear config to avoid infinite recursion
455 # Clear config to avoid infinite recursion
459 configoverrides = {('experimental',
456 configoverrides = {('experimental',
460 'revisions.disambiguatewithin'): None}
457 'revisions.disambiguatewithin'): None}
461 with repo.ui.configoverride(configoverrides):
458 with repo.ui.configoverride(configoverrides):
462 revs = repo.anyrevs([revset], user=True)
459 revs = repo.anyrevs([revset], user=True)
463 matches = []
460 matches = []
464 for rev in revs:
461 for rev in revs:
465 node = repo.changelog.node(rev)
462 node = repo.changelog.node(rev)
466 if hex(node).startswith(prefix):
463 if hex(node).startswith(prefix):
467 matches.append(node)
464 matches.append(node)
468 if len(matches) == 1:
465 if len(matches) == 1:
469 return matches[0]
466 return matches[0]
470 raise
467 raise
471 if node is None:
468 if node is None:
472 return
469 return
473 repo.changelog.rev(node) # make sure node isn't filtered
470 repo.changelog.rev(node) # make sure node isn't filtered
474 return node
471 return node
475
472
476 def mayberevnum(repo, prefix):
473 def mayberevnum(repo, prefix):
477 """Checks if the given prefix may be mistaken for a revision number"""
474 """Checks if the given prefix may be mistaken for a revision number"""
478 try:
475 try:
479 i = int(prefix)
476 i = int(prefix)
480 # if we are a pure int, then starting with zero will not be
477 # if we are a pure int, then starting with zero will not be
481 # confused as a rev; or, obviously, if the int is larger
478 # confused as a rev; or, obviously, if the int is larger
482 # than the value of the tip rev. We still need to disambiguate if
479 # than the value of the tip rev. We still need to disambiguate if
483 # prefix == '0', since that *is* a valid revnum.
480 # prefix == '0', since that *is* a valid revnum.
484 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
481 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
485 return False
482 return False
486 return True
483 return True
487 except ValueError:
484 except ValueError:
488 return False
485 return False
489
486
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.

    Raises error.RepoLookupError if the node cannot be found at all.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength=max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # In prefixhexnode mode, mark revnum-looking prefixes with a
            # leading 'x' instead of lengthening them.
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # Otherwise extend the prefix until it can no longer be mistaken
        # for a revision number.
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        # If the node is inside the configured revset, only disambiguate
        # against the other nodes of that revset (cheaper than the whole
        # repo).  Both the resolved revset and the nodetree are memoized
        # in 'cache' when provided.
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                # Fast path: the native nodetree computes the shortest
                # unique length directly.
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # Slow path: linear scan over the revset at each candidate
            # length until exactly one match remains.
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
560
557
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
572
569
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Lookup order: special names, revision number, full 40-char hex node,
    registered names (bookmarks/tags/branches via repo.names), then
    hex nodeid prefix.  Raises error.RepoLookupError when nothing matches.
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            # Reject forms that int() accepts but that are not canonical
            # revnums (e.g. '042', ' 1', '+1').
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                # negative revnums count from the tip
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # re-raise so the outer handler can build a "filtered" message
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            # looks like a full hex nodeid
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # the symbol resolved to the working directory pseudo-revision
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
633
630
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    # Non-"visible" filters (e.g. "served") get a generic message without
    # the --hidden hint.
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)
658
655
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve a single user-supplied revset to one changectx.

    Falls back to 'default' when 'revspec' is empty (but 0 is a valid
    spec).  When the revset matches several revisions, the last one wins.
    Aborts on an empty result set.
    """
    if revspec or revspec == 0:
        matched = revrange(repo, [revspec], localalias=localalias)
        if not matched:
            raise error.Abort(_('empty revision set'))
        return repo[matched.last()]
    return repo[default]
667
664
def _pairspec(revspec):
    """Return whether the top level of 'revspec' is a range expression."""
    parsed = revsetlang.parse(revspec)
    if not parsed:
        return parsed
    return parsed[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
671
668
def revpair(repo, revs):
    """Resolve a list of revset specs to a (first, second) context pair.

    With no specs, returns ('.', working directory).  The pair is taken
    from the extremes of the resulting smartset, honoring its ordering.
    Returns (first, wdir) when the specs collapse to a single revision
    that was not written as an explicit range.
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        # unordered set: fall back to iteration order
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # With multiple specs collapsing to one rev, make sure none of them
    # was individually empty (e.g. "hg diff -r foo -r ''").
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
701
698
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Normalize bare revnums into revset syntax before delegating.
    allspecs = [revsetlang.formatspec('%d', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
729
726
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # merge: show both parents
        return parents
    onlyparent = parents[0]
    if repo.ui.debugflag:
        # debug output always shows two parents, padding with null
        return [onlyparent, repo[nullrev]]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # parent is the immediately preceding revision: implied, omit it
        return []
    return parents
745
742
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume the expansion has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind (e.g. 're:', 'glob:'): pass through
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        # keep the literal pattern when the glob matched nothing
        expanded.extend(matches if matches else [kindpat])
    return expanded
764
761
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE(review): 'bad' closes over 'm', which is only assigned below;
        # this assumes the matcher does not invoke badfn during
        # construction -- confirm against ctx.match().
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # A match-everything matcher needs no patterns reported back.
    if m.always():
        pats = []
    return m, pats
789
786
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
794
791
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
798
795
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
802
799
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    A plain path is canonicalized directly; a real pattern must match
    exactly one file in the context of 'rev', otherwise a ParseError
    carrying 'msg' is raised.
    """
    if matchmod.patkind(pat):
        ctx = repo[rev]
        matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        matched = [f for f in ctx if matcher(f)]
        if len(matched) != 1:
            raise error.ParseError(msg)
        return matched[0]
    return pathutil.canonpath(repo.root, repo.getcwd(), pat)
816
813
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    backuppath = ui.config('ui', 'origbackuppath')
    if backuppath:
        return vfs.vfs(repo.wvfs.join(backuppath))
    return None
825
822
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Side effects: may remove files/directories in the backup area that
    conflict with the backup file's path, and create missing directories.
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the first (deepest) conflicting ancestor needs removal
                break

        origvfs.makedirs(origbackupdir)

    # A directory sitting where the backup file should go must be removed.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
860
857
861 class _containsnode(object):
858 class _containsnode(object):
862 """proxy __contains__(node) to container.__contains__ which accepts revs"""
859 """proxy __contains__(node) to container.__contains__ which accepts revs"""
863
860
864 def __init__(self, repo, revcontainer):
861 def __init__(self, repo, revcontainer):
865 self._torev = repo.changelog.rev
862 self._torev = repo.changelog.rev
866 self._revcontains = revcontainer.__contains__
863 self._revcontains = revcontainer.__contains__
867
864
868 def __contains__(self, node):
865 def __contains__(self, node):
869 return self._revcontains(self._torev(node))
866 return self._revcontains(self._torev(node))
870
867
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase only makes sense when phases are being fixed up
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        # bare iterable of nodes: they have no successors
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                # caller-supplied move wins over the computed one
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # reverse map: new node -> list of old nodes it replaces
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process in topological (rev) order so parent phases are final
        # before their children are examined
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                # inherit the "most secret" phase among predecessors/parents
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
1013
1010
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and remove missing files matched by matcher.

    Walks the working directory (recursing into matched subrepos),
    schedules unknown files for addition and missing files for removal,
    and records renames between removed and added files when the
    'similarity' option is given.  With 'dry_run' set, nothing is
    actually marked in the dirstate.

    Returns 1 if any explicitly-requested path was rejected or a subrepo
    addremove failed, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    # 'similarity' arrives as a 0-100 percentage; validate then normalize
    # to the 0.0-1.0 fraction that _findrenames expects
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # recurse into subrepos that were explicitly requested ('subrepos'
    # option), named exactly, or matched by at least one pattern
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # report only paths the user named explicitly, but record every
        # rejected path so the final status check below can see them
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # tell the user what is about to be added/removed (exact matches are
    # only echoed in verbose mode)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly-requested path that was rejected is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
1075
1072
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # badfn closes over `rejected`; it is only invoked once the matcher is
    # actually walked, after the list exists
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # echo what is being added/removed
        unknownset = set(unknown + forgotten)
        for abs in sorted(unknownset.union(deleted)):
            if abs in unknownset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
1104
1101
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists of file names:
    (added, unknown, deleted, removed, forgotten).'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # walkresults maps file name -> stat-ish result (falsy when the file is
    # absent on disk); dirstate[abs] yields the one-letter dirstate code
    # ('?' untracked, 'a' added, 'r' removed, etc.)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # on disk but untracked (and the path passes the audit)
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed, yet present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1134
1131
1135 def _findrenames(repo, matcher, added, removed, similarity):
1132 def _findrenames(repo, matcher, added, removed, similarity):
1136 '''Find renames from removed files to added ones.'''
1133 '''Find renames from removed files to added ones.'''
1137 renames = {}
1134 renames = {}
1138 if similarity > 0:
1135 if similarity > 0:
1139 for old, new, score in similar.findrenames(repo, added, removed,
1136 for old, new, score in similar.findrenames(repo, added, removed,
1140 similarity):
1137 similarity):
1141 if (repo.ui.verbose or not matcher.exact(old)
1138 if (repo.ui.verbose or not matcher.exact(old)
1142 or not matcher.exact(new)):
1139 or not matcher.exact(new)):
1143 repo.ui.status(_('recording removal of %s as rename to %s '
1140 repo.ui.status(_('recording removal of %s as rename to %s '
1144 '(%d%% similar)\n') %
1141 '(%d%% similar)\n') %
1145 (matcher.rel(old), matcher.rel(new),
1142 (matcher.rel(old), matcher.rel(new),
1146 score * 100))
1143 score * 100))
1147 renames[new] = old
1144 renames[new] = old
1148 return renames
1145 return renames
1149
1146
1150 def _markchanges(repo, unknown, deleted, renames):
1147 def _markchanges(repo, unknown, deleted, renames):
1151 '''Marks the files in unknown as added, the files in deleted as removed,
1148 '''Marks the files in unknown as added, the files in deleted as removed,
1152 and the files in renames as copied.'''
1149 and the files in renames as copied.'''
1153 wctx = repo[None]
1150 wctx = repo[None]
1154 with repo.wlock():
1151 with repo.wlock():
1155 wctx.forget(deleted)
1152 wctx.forget(deleted)
1156 wctx.add(unknown)
1153 wctx.add(unknown)
1157 for new, old in renames.iteritems():
1154 for new, old in renames.iteritems():
1158 wctx.copy(old, new)
1155 wctx.copy(old, new)
1159
1156
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    dirstate = repo.dirstate
    origsrc = dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy?
        if not dryrun and dirstate[dst] not in 'mn':
            dirstate.normallookup(dst)
    elif dirstate[origsrc] == 'a' and origsrc == src:
        # source was only added, never committed: no copy data can be stored
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if not dryrun and dirstate[dst] in '?r':
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1178
1175
def writerequires(opener, requirements):
    """Write the sorted requirement names, one per line, to 'requires'."""
    with opener('requires', 'w', atomictemp=True) as fp:
        fp.write(''.join('%s\n' % r for r in sorted(requirements)))
1183
1180
class filecachesubentry(object):
    """Tracks the stat() state of a single path for cache invalidation."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once determined, None while unknown
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-stat the path, but only if its state can be cached at all."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # while undetermined, optimistically assume the path is cacheable
        if self._cacheable is None:
            return True
        return self._cacheable

    def changed(self):
        """True if the path changed, or if it can never be trusted as
        unchanged (i.e. it is not cacheable)."""
        if not self.cacheable():
            # no point in going further if we can't cache it
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """util.cachestat for path, or None if the path does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1238
1235
class filecacheentry(object):
    """Aggregates filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1255
1252
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        # relative paths; resolved against the instance via join() at access
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and both forms of
        # its name — sname (native str) keys the instance __dict__, name
        # (bytes via pycompat.sysbytes) keys the _filecache dict
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        # the instance __dict__ shadows this non-data descriptor, so reaching
        # __get__ implies no cached attribute is set yet
        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            # existing entry: recompute only if the backing files changed
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # cache on the instance so later lookups bypass this descriptor
        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x
1340
1337
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.

    Raises error.Abort when the source is not configured or the shell
    command exits non-zero.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # split each record into "<revspec> <value>"; a record without a
            # space has an empty value
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the subprocess and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1397
1394
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    # Run ``cmd`` through ui.system with the inherited lock token exposed to
    # the child process via the ``envvar`` environment variable.  ``lock``
    # must currently be held; returns the subprocess exit code.  Note that a
    # caller-supplied ``environ`` dict is mutated in place.
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1407
1404
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1416
1413
class progress(object):
    """Helper that feeds position updates to a progress-bar callback.

    Usable as a context manager: leaving the ``with`` block calls
    complete(), which signals the bar to clear (pos=None).  When the
    'progress.debug' config knob is set, every update is also echoed
    through ui.debug().
    """

    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Move the bar to ``pos``; optionally update ``total``."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        # pos=None tells the bar to clear itself
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # Initialize to '' so the format expressions below never reference
        # an unbound name; previously ``unit`` was only assigned inside the
        # 'if self.unit:' branch, raising UnboundLocalError whenever debug
        # output was enabled with an empty unit.
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1463
1460
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1470
1467
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    generaldelta = ui.configbool('format', 'generaldelta')
    return generaldelta
1476
1473
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved key under which read() stores a non-key-value first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for backward compatibility but is unused
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            parsed = dict(line[:-1].split('=', 1)
                          for line in lines if line.strip())
        except ValueError as e:
            raise error.CorruptedState(str(e))
        if self.firstlinekey in parsed:
            raise error.CorruptedState(_("%r can't be used as a key")
                                       % self.firstlinekey)
        result.update(parsed)
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = [] if firstline is None else ['%s\n' % firstline]

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError("key name '%s' is reserved"
                                             % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (key, value))

        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1546 _reportobsoletedsource = [
1543 _reportobsoletedsource = [
1547 'debugobsolete',
1544 'debugobsolete',
1548 'pull',
1545 'pull',
1549 'push',
1546 'push',
1550 'serve',
1547 'serve',
1551 'unbundle',
1548 'unbundle',
1552 ]
1549 ]
1553
1550
1554 _reportnewcssource = [
1551 _reportnewcssource = [
1555 'pull',
1552 'pull',
1556 'unbundle',
1553 'unbundle',
1557 ]
1554 ]
1558
1555
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    if delta <= 0:
        # no new instabilities of this kind: nothing to warn about
        return None
    return _('%i new %s changesets\n') % (delta, instability)
def nodesummaries(repo, nodes, maxnumnodes=4):
    """return a one-line summary of *nodes*, abbreviated unless verbose"""
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for branch, heads in visible.branchmap().iteritems():
        if len(heads) < 2:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % branch
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # intentionally a no-op hook point: extensions monkeypatch this function
    return sink
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(parsed))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for sym in symbols:
        # first, try to interpret the symbol as a plain revision number
        try:
            num = int(sym)
        except ValueError:
            num = None
        if num is not None and num <= tiprev:
            if allowrevnums and num not in cl:
                revs.add(num)
            continue

        # otherwise treat it as a (possibly abbreviated) hex node id
        try:
            node = resolvehexnodeidprefix(unfi, sym)
        except (error.LookupError, error.WdirUnsupported):
            node = None

        if node is not None:
            rev = unficl.rev(node)
            if rev not in cl:
                revs.add(rev)

    return revs
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    expr = ("ancestors(bookmark(%s)) - "
            "ancestors(head() and not bookmark(%s)) - "
            "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(expr, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now