##// END OF EJS Templates
cleanupnodes: pass multiple predecessors to `createmarkers` directly
Boris Feld -
r39959:61f39a89 default
parent child Browse files
Show More
@@ -1,1802 +1,1801 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirid,
26 wdirid,
27 wdirrev,
27 wdirrev,
28 )
28 )
29
29
30 from . import (
30 from . import (
31 encoding,
31 encoding,
32 error,
32 error,
33 match as matchmod,
33 match as matchmod,
34 obsolete,
34 obsolete,
35 obsutil,
35 obsutil,
36 pathutil,
36 pathutil,
37 phases,
37 phases,
38 policy,
38 policy,
39 pycompat,
39 pycompat,
40 revsetlang,
40 revsetlang,
41 similar,
41 similar,
42 smartset,
42 smartset,
43 url,
43 url,
44 util,
44 util,
45 vfs,
45 vfs,
46 )
46 )
47
47
48 from .utils import (
48 from .utils import (
49 procutil,
49 procutil,
50 stringutil,
50 stringutil,
51 )
51 )
52
52
53 if pycompat.iswindows:
53 if pycompat.iswindows:
54 from . import scmwindows as scmplatform
54 from . import scmwindows as scmplatform
55 else:
55 else:
56 from . import scmposix as scmplatform
56 from . import scmposix as scmplatform
57
57
58 parsers = policy.importmod(r'parsers')
58 parsers = policy.importmod(r'parsers')
59
59
60 termsize = scmplatform.termsize
60 termsize = scmplatform.termsize
61
61
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        # render each slot as "name=value"; output matches the historical
        # single-format-string version byte for byte
        names = (r'modified', r'added', r'removed', r'deleted', r'unknown',
                 r'ignored', r'clean')
        parts = [r'%s=%s' % (name, pycompat.sysstr(stringutil.pprint(v)))
                 for name, v in zip(names, self)]
        return r'<status %s>' % r', '.join(parts)
115
115
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, preferring
    # subpaths from ctx1.  The subpaths from ctx2 matter when the .hgsub
    # file has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()
    for subpath in ctx2.substate:
        if subpath in ctx1.substate:
            continue
        del subpaths[subpath]
        missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2 so
    # that status and diff give an accurate result for
    # 'sub.{status|diff}(rev2)' instead of comparing the ctx2 subrepo
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
140
140
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # count changesets that were skipped only because they are secret
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
157
157
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            # print a traceback (when --traceback/ui.traceback is set)
            # before any handler below consumes the exception
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # normalize the payload to bytes before deciding how to render it
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # the only handler besides SystemExit that does not return -1:
        # "intervention required" is a distinct exit code (1)
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # last word of the message is usually the missing module name
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # distinguish the several flavors that can end up as IOError:
        # HTTP-ish errors carry 'code', urllib/ssl errors carry 'reason',
        # plain OS-level errors carry 'strerror'
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe to a pager or shell pipeline: stay silent
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # not a shape we know how to report; let it propagate
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
274
274
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    forbidden = (':', '\0', '\n', '\r')
    for c in forbidden:
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        # not an integer: acceptable as a name
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
291
291
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(badchar in f for badchar in ('\r', '\n')):
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))
297
297
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
309
309
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    cfg = ui.config('ui', 'portablefilenames')
    lowered = cfg.lower()
    boolval = stringutil.parsebool(cfg)
    # on Windows non-portable names are always fatal
    abort = pycompat.iswindows or lowered == 'abort'
    warn = boolval or lowered == 'warn'
    ignored = lowered == 'ignore'
    if boolval is None and not (warn or abort or ignored):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % cfg)
    return abort, warn
322
322
class casecollisionauditor(object):
    '''Detect additions that collide case-insensitively with tracked files.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case every tracked filename once, up front
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
346
346
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # only revisions at or below maxrev participate in the key, sorted so
    # the digest is deterministic
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
370
370
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def raise_on_root(err):
        # errors on the top-level path itself are fatal; deeper ones are
        # silently skipped by os.walk
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if not followsym or samestat is None:
        # symlink-cycle detection needs os.path.samestat
        followsym = False
    else:
        def adddir(statlist, dirname):
            # return True (and remember the stat) if dirname was not
            # visited before; used to break symlink loops
            here = os.stat(dirname)
            for known in statlist:
                if samestat(here, known):
                    return False
            statlist.append(here)
            return True

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True,
                                     onerror=raise_on_root):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # keep walking the working dir but never inside .hg itself
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            keep = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the link target ourselves, sharing
                        # the seen_dirs list to avoid revisiting
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        keep.append(d)
            dirs[:] = keep
414
414
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is not None:
        return node
    # working-directory contexts have no real node; use the magic wdir id
    return wdirid
421
421
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is not None:
        return rev
    # the working directory is represented by the magic wdir revision
    return wdirrev
429
429
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
435
435
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full hash in debug mode, abbreviated hash otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
443
443
def resolvehexnodeidprefix(repo, prefix):
    # Resolve a (possibly abbreviated) hex nodeid prefix to a full binary
    # node, or return None when nothing matches; may raise
    # AmbiguousPrefixLookupError when several nodes match.
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        # a leading 'x' explicitly marks the string as a hex node prefix
        # (as produced by the disambiguate() helper below)
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        # optionally retry the lookup restricted to a configured revset;
        # a unique match within that subset wins
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        # still ambiguous (or no revset configured): propagate the error
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
472
472
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
    except ValueError:
        # not numeric at all, so it cannot be a revnum
        return False
    # if we are a pure int, then starting with zero will not be
    # confused as a rev; or, obviously, if the int is larger
    # than the value of the tip rev
    if prefix[0:1] == b'0' or i >= len(repo):
        return False
    return True
485
485
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # In this mode a prefix that could be read as a revision
            # number is marked with a leading 'x' instead of being
            # lengthened.
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # Otherwise grow the prefix until it can no longer be mistaken
        # for a revision number.
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        # Disambiguate only against the configured subset of revisions.
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    # Fast path: native nodetree from the C parsers module.
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # Slow path: linear scan of the revset for each candidate
            # prefix length until exactly one match remains.
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    # Default: let the changelog compute the shortest unique prefix, then
    # disambiguate it against revision numbers.
    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
554
554
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
566
566
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        # Well-known symbolic names go straight through repo lookup.
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # Try the symbol as a revision number. The round-trip through
        # '%d' rejects forms like '0x10' or '010' that int() would accept.
        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                # negative revnums count from the end
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # propagate so the outer handler can build a "filtered" error
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # Try the symbol as a full 40-char hex node id.
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Finally, try the symbol as a (short) hex node id prefix.
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # the working directory pseudo-revision
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        # translate "exists but hidden/filtered" into a helpful message
        raise _filterederror(repo, symbol)
627
627
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        # generic filter (e.g. "served"): report which subset is missing it
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # Check if the changeset is obsolete
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)

    # If the changeset is obsolete, enrich the message with the reason
    # that made this changeset not visible
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
652
652
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve a single revision spec to a context, falling back to default.

    An empty spec (other than the integer 0) resolves to ``repo[default]``;
    an empty result set aborts.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
661
661
def _pairspec(revspec):
    """Report whether revspec parses to a top-level range expression."""
    rangekinds = ('range', 'rangepre', 'rangepost', 'rangeall')
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in rangekinds
665
665
def revpair(repo, revs):
    """Resolve user-supplied revision specs into a (first, second) pair.

    With no specs, returns (working copy parent, working copy). A single
    non-range spec yields (that revision, working copy).
    """
    if not revs:
        return repo['.'], repo[None]

    resolved = revrange(repo, revs)

    # Pick the endpoints cheaply when the smartset knows its ordering.
    if not resolved:
        first = second = None
    elif resolved.isascending():
        first, second = resolved.min(), resolved.max()
    elif resolved.isdescending():
        first, second = resolved.max(), resolved.min()
    else:
        first, second = resolved.first(), resolved.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
695
695
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Wrap bare revision numbers in rev(); everything else is assumed to
    # already be a formatted revset expression.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
723
723
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a merge: both parents always matter
        return parents
    if repo.ui.debugflag:
        # in debug mode show the (possibly null) second parent as well
        return [parents[0], repo[nullrev]]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # parent is simply the previous revision: not worth displaying
        return []
    return parents
739
739
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit pattern kind (e.g. "glob:", "re:"): leave as-is
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            # glob internals choked on the pattern; treat it literally
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # no filesystem match: keep the original pattern
            expanded.append(kindpat)
    return expanded
758
758
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'matcher' is bound below; the callback only fires after ctx.match()
        # has returned, so the late binding is safe.
        ctx.repo().ui.warn("%s: %s\n" % (matcher.rel(f), msg))

    if badfn is None:
        badfn = bad

    matcher = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                        default, listsubrepos=opts.get('subrepos'),
                        badfn=badfn)

    if matcher.always():
        pats = []
    return matcher, pats
783
783
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _usedpats = matchandpats(ctx, pats, opts, globbed, default,
                                      badfn=badfn)
    return matcher
788
788
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
792
792
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
796
796
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # a plain path: canonicalize it relative to the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # a real pattern: it must match exactly one file in the revision
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
810
810
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        # no custom location configured: backup next to the file
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the deepest conflicting ancestor needs removing;
                # makedirs below recreates the rest
                break

        origvfs.makedirs(origbackupdir)

    # a directory (that is not a symlink) at the backup path would shadow
    # the backup file; remove it
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
846
846
847 class _containsnode(object):
847 class _containsnode(object):
848 """proxy __contains__(node) to container.__contains__ which accepts revs"""
848 """proxy __contains__(node) to container.__contains__ which accepts revs"""
849
849
850 def __init__(self, repo, revcontainer):
850 def __init__(self, repo, revcontainer):
851 self._torev = repo.changelog.rev
851 self._torev = repo.changelog.rev
852 self._revcontains = revcontainer.__contains__
852 self._revcontains = revcontainer.__contains__
853
853
854 def __contains__(self, node):
854 def __contains__(self, node):
855 return self._revcontains(self._torev(node))
855 return self._revcontains(self._torev(node))
856
856
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase only makes sense when we are allowed to fix phases
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        # a bare iterable of nodes: each is "replaced" by nothing
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                # caller-specified move takes precedence
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # map each new node to the old nodes it replaces, so its phase can
        # be derived from theirs
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process new nodes in topological (rev) order so parents' phases
        # are already decided when a child is examined
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            # a child can never be in an earlier phase than its parents
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            # sort by the rev of the first predecessor in each group
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                # one relation per replacement group: all predecessors are
                # passed to createmarkers together
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence disabled: physically strip the replaced nodes
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
999
998
1000 def addremove(repo, matcher, prefix, opts=None):
999 def addremove(repo, matcher, prefix, opts=None):
1001 if opts is None:
1000 if opts is None:
1002 opts = {}
1001 opts = {}
1003 m = matcher
1002 m = matcher
1004 dry_run = opts.get('dry_run')
1003 dry_run = opts.get('dry_run')
1005 try:
1004 try:
1006 similarity = float(opts.get('similarity') or 0)
1005 similarity = float(opts.get('similarity') or 0)
1007 except ValueError:
1006 except ValueError:
1008 raise error.Abort(_('similarity must be a number'))
1007 raise error.Abort(_('similarity must be a number'))
1009 if similarity < 0 or similarity > 100:
1008 if similarity < 0 or similarity > 100:
1010 raise error.Abort(_('similarity must be between 0 and 100'))
1009 raise error.Abort(_('similarity must be between 0 and 100'))
1011 similarity /= 100.0
1010 similarity /= 100.0
1012
1011
1013 ret = 0
1012 ret = 0
1014 join = lambda f: os.path.join(prefix, f)
1013 join = lambda f: os.path.join(prefix, f)
1015
1014
1016 wctx = repo[None]
1015 wctx = repo[None]
1017 for subpath in sorted(wctx.substate):
1016 for subpath in sorted(wctx.substate):
1018 submatch = matchmod.subdirmatcher(subpath, m)
1017 submatch = matchmod.subdirmatcher(subpath, m)
1019 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1018 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1020 sub = wctx.sub(subpath)
1019 sub = wctx.sub(subpath)
1021 try:
1020 try:
1022 if sub.addremove(submatch, prefix, opts):
1021 if sub.addremove(submatch, prefix, opts):
1023 ret = 1
1022 ret = 1
1024 except error.LookupError:
1023 except error.LookupError:
1025 repo.ui.status(_("skipping missing subrepository: %s\n")
1024 repo.ui.status(_("skipping missing subrepository: %s\n")
1026 % join(subpath))
1025 % join(subpath))
1027
1026
1028 rejected = []
1027 rejected = []
1029 def badfn(f, msg):
1028 def badfn(f, msg):
1030 if f in m.files():
1029 if f in m.files():
1031 m.bad(f, msg)
1030 m.bad(f, msg)
1032 rejected.append(f)
1031 rejected.append(f)
1033
1032
1034 badmatch = matchmod.badmatch(m, badfn)
1033 badmatch = matchmod.badmatch(m, badfn)
1035 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1034 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1036 badmatch)
1035 badmatch)
1037
1036
1038 unknownset = set(unknown + forgotten)
1037 unknownset = set(unknown + forgotten)
1039 toprint = unknownset.copy()
1038 toprint = unknownset.copy()
1040 toprint.update(deleted)
1039 toprint.update(deleted)
1041 for abs in sorted(toprint):
1040 for abs in sorted(toprint):
1042 if repo.ui.verbose or not m.exact(abs):
1041 if repo.ui.verbose or not m.exact(abs):
1043 if abs in unknownset:
1042 if abs in unknownset:
1044 status = _('adding %s\n') % m.uipath(abs)
1043 status = _('adding %s\n') % m.uipath(abs)
1045 label = 'addremove.added'
1044 label = 'addremove.added'
1046 else:
1045 else:
1047 status = _('removing %s\n') % m.uipath(abs)
1046 status = _('removing %s\n') % m.uipath(abs)
1048 label = 'addremove.removed'
1047 label = 'addremove.removed'
1049 repo.ui.status(status, label=label)
1048 repo.ui.status(status, label=label)
1050
1049
1051 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1050 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1052 similarity)
1051 similarity)
1053
1052
1054 if not dry_run:
1053 if not dry_run:
1055 _markchanges(repo, unknown + forgotten, deleted, renames)
1054 _markchanges(repo, unknown + forgotten, deleted, renames)
1056
1055
1057 for f in rejected:
1056 for f in rejected:
1058 if f in m.files():
1057 if f in m.files():
1059 return 1
1058 return 1
1060 return ret
1059 return ret
1061
1060
1062 def marktouched(repo, files, similarity=0.0):
1061 def marktouched(repo, files, similarity=0.0):
1063 '''Assert that files have somehow been operated upon. files are relative to
1062 '''Assert that files have somehow been operated upon. files are relative to
1064 the repo root.'''
1063 the repo root.'''
1065 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1064 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1066 rejected = []
1065 rejected = []
1067
1066
1068 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1067 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1069
1068
1070 if repo.ui.verbose:
1069 if repo.ui.verbose:
1071 unknownset = set(unknown + forgotten)
1070 unknownset = set(unknown + forgotten)
1072 toprint = unknownset.copy()
1071 toprint = unknownset.copy()
1073 toprint.update(deleted)
1072 toprint.update(deleted)
1074 for abs in sorted(toprint):
1073 for abs in sorted(toprint):
1075 if abs in unknownset:
1074 if abs in unknownset:
1076 status = _('adding %s\n') % abs
1075 status = _('adding %s\n') % abs
1077 else:
1076 else:
1078 status = _('removing %s\n') % abs
1077 status = _('removing %s\n') % abs
1079 repo.ui.status(status)
1078 repo.ui.status(status)
1080
1079
1081 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1080 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1082 similarity)
1081 similarity)
1083
1082
1084 _markchanges(repo, unknown + forgotten, deleted, renames)
1083 _markchanges(repo, unknown + forgotten, deleted, renames)
1085
1084
1086 for f in rejected:
1085 for f in rejected:
1087 if f in m.files():
1086 if f in m.files():
1088 return 1
1087 return 1
1089 return 0
1088 return 0
1090
1089
1091 def _interestingfiles(repo, matcher):
1090 def _interestingfiles(repo, matcher):
1092 '''Walk dirstate with matcher, looking for files that addremove would care
1091 '''Walk dirstate with matcher, looking for files that addremove would care
1093 about.
1092 about.
1094
1093
1095 This is different from dirstate.status because it doesn't care about
1094 This is different from dirstate.status because it doesn't care about
1096 whether files are modified or clean.'''
1095 whether files are modified or clean.'''
1097 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1096 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1098 audit_path = pathutil.pathauditor(repo.root, cached=True)
1097 audit_path = pathutil.pathauditor(repo.root, cached=True)
1099
1098
1100 ctx = repo[None]
1099 ctx = repo[None]
1101 dirstate = repo.dirstate
1100 dirstate = repo.dirstate
1102 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1101 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1103 unknown=True, ignored=False, full=False)
1102 unknown=True, ignored=False, full=False)
1104 for abs, st in walkresults.iteritems():
1103 for abs, st in walkresults.iteritems():
1105 dstate = dirstate[abs]
1104 dstate = dirstate[abs]
1106 if dstate == '?' and audit_path.check(abs):
1105 if dstate == '?' and audit_path.check(abs):
1107 unknown.append(abs)
1106 unknown.append(abs)
1108 elif dstate != 'r' and not st:
1107 elif dstate != 'r' and not st:
1109 deleted.append(abs)
1108 deleted.append(abs)
1110 elif dstate == 'r' and st:
1109 elif dstate == 'r' and st:
1111 forgotten.append(abs)
1110 forgotten.append(abs)
1112 # for finding renames
1111 # for finding renames
1113 elif dstate == 'r' and not st:
1112 elif dstate == 'r' and not st:
1114 removed.append(abs)
1113 removed.append(abs)
1115 elif dstate == 'a':
1114 elif dstate == 'a':
1116 added.append(abs)
1115 added.append(abs)
1117
1116
1118 return added, unknown, deleted, removed, forgotten
1117 return added, unknown, deleted, removed, forgotten
1119
1118
1120 def _findrenames(repo, matcher, added, removed, similarity):
1119 def _findrenames(repo, matcher, added, removed, similarity):
1121 '''Find renames from removed files to added ones.'''
1120 '''Find renames from removed files to added ones.'''
1122 renames = {}
1121 renames = {}
1123 if similarity > 0:
1122 if similarity > 0:
1124 for old, new, score in similar.findrenames(repo, added, removed,
1123 for old, new, score in similar.findrenames(repo, added, removed,
1125 similarity):
1124 similarity):
1126 if (repo.ui.verbose or not matcher.exact(old)
1125 if (repo.ui.verbose or not matcher.exact(old)
1127 or not matcher.exact(new)):
1126 or not matcher.exact(new)):
1128 repo.ui.status(_('recording removal of %s as rename to %s '
1127 repo.ui.status(_('recording removal of %s as rename to %s '
1129 '(%d%% similar)\n') %
1128 '(%d%% similar)\n') %
1130 (matcher.rel(old), matcher.rel(new),
1129 (matcher.rel(old), matcher.rel(new),
1131 score * 100))
1130 score * 100))
1132 renames[new] = old
1131 renames[new] = old
1133 return renames
1132 return renames
1134
1133
1135 def _markchanges(repo, unknown, deleted, renames):
1134 def _markchanges(repo, unknown, deleted, renames):
1136 '''Marks the files in unknown as added, the files in deleted as removed,
1135 '''Marks the files in unknown as added, the files in deleted as removed,
1137 and the files in renames as copied.'''
1136 and the files in renames as copied.'''
1138 wctx = repo[None]
1137 wctx = repo[None]
1139 with repo.wlock():
1138 with repo.wlock():
1140 wctx.forget(deleted)
1139 wctx.forget(deleted)
1141 wctx.add(unknown)
1140 wctx.add(unknown)
1142 for new, old in renames.iteritems():
1141 for new, old in renames.iteritems():
1143 wctx.copy(old, new)
1142 wctx.copy(old, new)
1144
1143
1145 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1144 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1146 """Update the dirstate to reflect the intent of copying src to dst. For
1145 """Update the dirstate to reflect the intent of copying src to dst. For
1147 different reasons it might not end with dst being marked as copied from src.
1146 different reasons it might not end with dst being marked as copied from src.
1148 """
1147 """
1149 origsrc = repo.dirstate.copied(src) or src
1148 origsrc = repo.dirstate.copied(src) or src
1150 if dst == origsrc: # copying back a copy?
1149 if dst == origsrc: # copying back a copy?
1151 if repo.dirstate[dst] not in 'mn' and not dryrun:
1150 if repo.dirstate[dst] not in 'mn' and not dryrun:
1152 repo.dirstate.normallookup(dst)
1151 repo.dirstate.normallookup(dst)
1153 else:
1152 else:
1154 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1153 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1155 if not ui.quiet:
1154 if not ui.quiet:
1156 ui.warn(_("%s has not been committed yet, so no copy "
1155 ui.warn(_("%s has not been committed yet, so no copy "
1157 "data will be stored for %s.\n")
1156 "data will be stored for %s.\n")
1158 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1157 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1159 if repo.dirstate[dst] in '?r' and not dryrun:
1158 if repo.dirstate[dst] in '?r' and not dryrun:
1160 wctx.add([dst])
1159 wctx.add([dst])
1161 elif not dryrun:
1160 elif not dryrun:
1162 wctx.copy(origsrc, dst)
1161 wctx.copy(origsrc, dst)
1163
1162
1164 def writerequires(opener, requirements):
1163 def writerequires(opener, requirements):
1165 with opener('requires', 'w') as fp:
1164 with opener('requires', 'w') as fp:
1166 for r in sorted(requirements):
1165 for r in sorted(requirements):
1167 fp.write("%s\n" % r)
1166 fp.write("%s\n" % r)
1168
1167
1169 class filecachesubentry(object):
1168 class filecachesubentry(object):
1170 def __init__(self, path, stat):
1169 def __init__(self, path, stat):
1171 self.path = path
1170 self.path = path
1172 self.cachestat = None
1171 self.cachestat = None
1173 self._cacheable = None
1172 self._cacheable = None
1174
1173
1175 if stat:
1174 if stat:
1176 self.cachestat = filecachesubentry.stat(self.path)
1175 self.cachestat = filecachesubentry.stat(self.path)
1177
1176
1178 if self.cachestat:
1177 if self.cachestat:
1179 self._cacheable = self.cachestat.cacheable()
1178 self._cacheable = self.cachestat.cacheable()
1180 else:
1179 else:
1181 # None means we don't know yet
1180 # None means we don't know yet
1182 self._cacheable = None
1181 self._cacheable = None
1183
1182
1184 def refresh(self):
1183 def refresh(self):
1185 if self.cacheable():
1184 if self.cacheable():
1186 self.cachestat = filecachesubentry.stat(self.path)
1185 self.cachestat = filecachesubentry.stat(self.path)
1187
1186
1188 def cacheable(self):
1187 def cacheable(self):
1189 if self._cacheable is not None:
1188 if self._cacheable is not None:
1190 return self._cacheable
1189 return self._cacheable
1191
1190
1192 # we don't know yet, assume it is for now
1191 # we don't know yet, assume it is for now
1193 return True
1192 return True
1194
1193
1195 def changed(self):
1194 def changed(self):
1196 # no point in going further if we can't cache it
1195 # no point in going further if we can't cache it
1197 if not self.cacheable():
1196 if not self.cacheable():
1198 return True
1197 return True
1199
1198
1200 newstat = filecachesubentry.stat(self.path)
1199 newstat = filecachesubentry.stat(self.path)
1201
1200
1202 # we may not know if it's cacheable yet, check again now
1201 # we may not know if it's cacheable yet, check again now
1203 if newstat and self._cacheable is None:
1202 if newstat and self._cacheable is None:
1204 self._cacheable = newstat.cacheable()
1203 self._cacheable = newstat.cacheable()
1205
1204
1206 # check again
1205 # check again
1207 if not self._cacheable:
1206 if not self._cacheable:
1208 return True
1207 return True
1209
1208
1210 if self.cachestat != newstat:
1209 if self.cachestat != newstat:
1211 self.cachestat = newstat
1210 self.cachestat = newstat
1212 return True
1211 return True
1213 else:
1212 else:
1214 return False
1213 return False
1215
1214
1216 @staticmethod
1215 @staticmethod
1217 def stat(path):
1216 def stat(path):
1218 try:
1217 try:
1219 return util.cachestat(path)
1218 return util.cachestat(path)
1220 except OSError as e:
1219 except OSError as e:
1221 if e.errno != errno.ENOENT:
1220 if e.errno != errno.ENOENT:
1222 raise
1221 raise
1223
1222
1224 class filecacheentry(object):
1223 class filecacheentry(object):
1225 def __init__(self, paths, stat=True):
1224 def __init__(self, paths, stat=True):
1226 self._entries = []
1225 self._entries = []
1227 for path in paths:
1226 for path in paths:
1228 self._entries.append(filecachesubentry(path, stat))
1227 self._entries.append(filecachesubentry(path, stat))
1229
1228
1230 def changed(self):
1229 def changed(self):
1231 '''true if any entry has changed'''
1230 '''true if any entry has changed'''
1232 for entry in self._entries:
1231 for entry in self._entries:
1233 if entry.changed():
1232 if entry.changed():
1234 return True
1233 return True
1235 return False
1234 return False
1236
1235
1237 def refresh(self):
1236 def refresh(self):
1238 for entry in self._entries:
1237 for entry in self._entries:
1239 entry.refresh()
1238 entry.refresh()
1240
1239
1241 class filecache(object):
1240 class filecache(object):
1242 """A property like decorator that tracks files under .hg/ for updates.
1241 """A property like decorator that tracks files under .hg/ for updates.
1243
1242
1244 On first access, the files defined as arguments are stat()ed and the
1243 On first access, the files defined as arguments are stat()ed and the
1245 results cached. The decorated function is called. The results are stashed
1244 results cached. The decorated function is called. The results are stashed
1246 away in a ``_filecache`` dict on the object whose method is decorated.
1245 away in a ``_filecache`` dict on the object whose method is decorated.
1247
1246
1248 On subsequent access, the cached result is returned.
1247 On subsequent access, the cached result is returned.
1249
1248
1250 On external property set operations, stat() calls are performed and the new
1249 On external property set operations, stat() calls are performed and the new
1251 value is cached.
1250 value is cached.
1252
1251
1253 On property delete operations, cached data is removed.
1252 On property delete operations, cached data is removed.
1254
1253
1255 When using the property API, cached data is always returned, if available:
1254 When using the property API, cached data is always returned, if available:
1256 no stat() is performed to check if the file has changed and if the function
1255 no stat() is performed to check if the file has changed and if the function
1257 needs to be called to reflect file changes.
1256 needs to be called to reflect file changes.
1258
1257
1259 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1258 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1260 can populate an entry before the property's getter is called. In this case,
1259 can populate an entry before the property's getter is called. In this case,
1261 entries in ``_filecache`` will be used during property operations,
1260 entries in ``_filecache`` will be used during property operations,
1262 if available. If the underlying file changes, it is up to external callers
1261 if available. If the underlying file changes, it is up to external callers
1263 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1262 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1264 method result as well as possibly calling ``del obj._filecache[attr]`` to
1263 method result as well as possibly calling ``del obj._filecache[attr]`` to
1265 remove the ``filecacheentry``.
1264 remove the ``filecacheentry``.
1266 """
1265 """
1267
1266
1268 def __init__(self, *paths):
1267 def __init__(self, *paths):
1269 self.paths = paths
1268 self.paths = paths
1270
1269
1271 def join(self, obj, fname):
1270 def join(self, obj, fname):
1272 """Used to compute the runtime path of a cached file.
1271 """Used to compute the runtime path of a cached file.
1273
1272
1274 Users should subclass filecache and provide their own version of this
1273 Users should subclass filecache and provide their own version of this
1275 function to call the appropriate join function on 'obj' (an instance
1274 function to call the appropriate join function on 'obj' (an instance
1276 of the class that its member function was decorated).
1275 of the class that its member function was decorated).
1277 """
1276 """
1278 raise NotImplementedError
1277 raise NotImplementedError
1279
1278
1280 def __call__(self, func):
1279 def __call__(self, func):
1281 self.func = func
1280 self.func = func
1282 self.sname = func.__name__
1281 self.sname = func.__name__
1283 self.name = pycompat.sysbytes(self.sname)
1282 self.name = pycompat.sysbytes(self.sname)
1284 return self
1283 return self
1285
1284
1286 def __get__(self, obj, type=None):
1285 def __get__(self, obj, type=None):
1287 # if accessed on the class, return the descriptor itself.
1286 # if accessed on the class, return the descriptor itself.
1288 if obj is None:
1287 if obj is None:
1289 return self
1288 return self
1290 # do we need to check if the file changed?
1289 # do we need to check if the file changed?
1291 if self.sname in obj.__dict__:
1290 if self.sname in obj.__dict__:
1292 assert self.name in obj._filecache, self.name
1291 assert self.name in obj._filecache, self.name
1293 return obj.__dict__[self.sname]
1292 return obj.__dict__[self.sname]
1294
1293
1295 entry = obj._filecache.get(self.name)
1294 entry = obj._filecache.get(self.name)
1296
1295
1297 if entry:
1296 if entry:
1298 if entry.changed():
1297 if entry.changed():
1299 entry.obj = self.func(obj)
1298 entry.obj = self.func(obj)
1300 else:
1299 else:
1301 paths = [self.join(obj, path) for path in self.paths]
1300 paths = [self.join(obj, path) for path in self.paths]
1302
1301
1303 # We stat -before- creating the object so our cache doesn't lie if
1302 # We stat -before- creating the object so our cache doesn't lie if
1304 # a writer modified between the time we read and stat
1303 # a writer modified between the time we read and stat
1305 entry = filecacheentry(paths, True)
1304 entry = filecacheentry(paths, True)
1306 entry.obj = self.func(obj)
1305 entry.obj = self.func(obj)
1307
1306
1308 obj._filecache[self.name] = entry
1307 obj._filecache[self.name] = entry
1309
1308
1310 obj.__dict__[self.sname] = entry.obj
1309 obj.__dict__[self.sname] = entry.obj
1311 return entry.obj
1310 return entry.obj
1312
1311
1313 def __set__(self, obj, value):
1312 def __set__(self, obj, value):
1314 if self.name not in obj._filecache:
1313 if self.name not in obj._filecache:
1315 # we add an entry for the missing value because X in __dict__
1314 # we add an entry for the missing value because X in __dict__
1316 # implies X in _filecache
1315 # implies X in _filecache
1317 paths = [self.join(obj, path) for path in self.paths]
1316 paths = [self.join(obj, path) for path in self.paths]
1318 ce = filecacheentry(paths, False)
1317 ce = filecacheentry(paths, False)
1319 obj._filecache[self.name] = ce
1318 obj._filecache[self.name] = ce
1320 else:
1319 else:
1321 ce = obj._filecache[self.name]
1320 ce = obj._filecache[self.name]
1322
1321
1323 ce.obj = value # update cached copy
1322 ce.obj = value # update cached copy
1324 obj.__dict__[self.sname] = value # update copy returned by obj.x
1323 obj.__dict__[self.sname] = value # update copy returned by obj.x
1325
1324
1326 def __delete__(self, obj):
1325 def __delete__(self, obj):
1327 try:
1326 try:
1328 del obj.__dict__[self.sname]
1327 del obj.__dict__[self.sname]
1329 except KeyError:
1328 except KeyError:
1330 raise AttributeError(self.sname)
1329 raise AttributeError(self.sname)
1331
1330
1332 def extdatasource(repo, source):
1331 def extdatasource(repo, source):
1333 """Gather a map of rev -> value dict from the specified source
1332 """Gather a map of rev -> value dict from the specified source
1334
1333
1335 A source spec is treated as a URL, with a special case shell: type
1334 A source spec is treated as a URL, with a special case shell: type
1336 for parsing the output from a shell command.
1335 for parsing the output from a shell command.
1337
1336
1338 The data is parsed as a series of newline-separated records where
1337 The data is parsed as a series of newline-separated records where
1339 each record is a revision specifier optionally followed by a space
1338 each record is a revision specifier optionally followed by a space
1340 and a freeform string value. If the revision is known locally, it
1339 and a freeform string value. If the revision is known locally, it
1341 is converted to a rev, otherwise the record is skipped.
1340 is converted to a rev, otherwise the record is skipped.
1342
1341
1343 Note that both key and value are treated as UTF-8 and converted to
1342 Note that both key and value are treated as UTF-8 and converted to
1344 the local encoding. This allows uniformity between local and
1343 the local encoding. This allows uniformity between local and
1345 remote data sources.
1344 remote data sources.
1346 """
1345 """
1347
1346
1348 spec = repo.ui.config("extdata", source)
1347 spec = repo.ui.config("extdata", source)
1349 if not spec:
1348 if not spec:
1350 raise error.Abort(_("unknown extdata source '%s'") % source)
1349 raise error.Abort(_("unknown extdata source '%s'") % source)
1351
1350
1352 data = {}
1351 data = {}
1353 src = proc = None
1352 src = proc = None
1354 try:
1353 try:
1355 if spec.startswith("shell:"):
1354 if spec.startswith("shell:"):
1356 # external commands should be run relative to the repo root
1355 # external commands should be run relative to the repo root
1357 cmd = spec[6:]
1356 cmd = spec[6:]
1358 proc = subprocess.Popen(procutil.tonativestr(cmd),
1357 proc = subprocess.Popen(procutil.tonativestr(cmd),
1359 shell=True, bufsize=-1,
1358 shell=True, bufsize=-1,
1360 close_fds=procutil.closefds,
1359 close_fds=procutil.closefds,
1361 stdout=subprocess.PIPE,
1360 stdout=subprocess.PIPE,
1362 cwd=procutil.tonativestr(repo.root))
1361 cwd=procutil.tonativestr(repo.root))
1363 src = proc.stdout
1362 src = proc.stdout
1364 else:
1363 else:
1365 # treat as a URL or file
1364 # treat as a URL or file
1366 src = url.open(repo.ui, spec)
1365 src = url.open(repo.ui, spec)
1367 for l in src:
1366 for l in src:
1368 if " " in l:
1367 if " " in l:
1369 k, v = l.strip().split(" ", 1)
1368 k, v = l.strip().split(" ", 1)
1370 else:
1369 else:
1371 k, v = l.strip(), ""
1370 k, v = l.strip(), ""
1372
1371
1373 k = encoding.tolocal(k)
1372 k = encoding.tolocal(k)
1374 try:
1373 try:
1375 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1374 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1376 except (error.LookupError, error.RepoLookupError):
1375 except (error.LookupError, error.RepoLookupError):
1377 pass # we ignore data for nodes that don't exist locally
1376 pass # we ignore data for nodes that don't exist locally
1378 finally:
1377 finally:
1379 if proc:
1378 if proc:
1380 proc.communicate()
1379 proc.communicate()
1381 if src:
1380 if src:
1382 src.close()
1381 src.close()
1383 if proc and proc.returncode != 0:
1382 if proc and proc.returncode != 0:
1384 raise error.Abort(_("extdata command '%s' failed: %s")
1383 raise error.Abort(_("extdata command '%s' failed: %s")
1385 % (cmd, procutil.explainexit(proc.returncode)))
1384 % (cmd, procutil.explainexit(proc.returncode)))
1386
1385
1387 return data
1386 return data
1388
1387
1389 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1388 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1390 if lock is None:
1389 if lock is None:
1391 raise error.LockInheritanceContractViolation(
1390 raise error.LockInheritanceContractViolation(
1392 'lock can only be inherited while held')
1391 'lock can only be inherited while held')
1393 if environ is None:
1392 if environ is None:
1394 environ = {}
1393 environ = {}
1395 with lock.inherit() as locker:
1394 with lock.inherit() as locker:
1396 environ[envvar] = locker
1395 environ[envvar] = locker
1397 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1396 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1398
1397
1399 def wlocksub(repo, cmd, *args, **kwargs):
1398 def wlocksub(repo, cmd, *args, **kwargs):
1400 """run cmd as a subprocess that allows inheriting repo's wlock
1399 """run cmd as a subprocess that allows inheriting repo's wlock
1401
1400
1402 This can only be called while the wlock is held. This takes all the
1401 This can only be called while the wlock is held. This takes all the
1403 arguments that ui.system does, and returns the exit code of the
1402 arguments that ui.system does, and returns the exit code of the
1404 subprocess."""
1403 subprocess."""
1405 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1404 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1406 **kwargs)
1405 **kwargs)
1407
1406
1408 class progress(object):
1407 class progress(object):
1409 def __init__(self, ui, topic, unit="", total=None):
1408 def __init__(self, ui, topic, unit="", total=None):
1410 self.ui = ui
1409 self.ui = ui
1411 self.pos = 0
1410 self.pos = 0
1412 self.topic = topic
1411 self.topic = topic
1413 self.unit = unit
1412 self.unit = unit
1414 self.total = total
1413 self.total = total
1415
1414
1416 def __enter__(self):
1415 def __enter__(self):
1417 return self
1416 return self
1418
1417
1419 def __exit__(self, exc_type, exc_value, exc_tb):
1418 def __exit__(self, exc_type, exc_value, exc_tb):
1420 self.complete()
1419 self.complete()
1421
1420
1422 def update(self, pos, item="", total=None):
1421 def update(self, pos, item="", total=None):
1423 assert pos is not None
1422 assert pos is not None
1424 if total:
1423 if total:
1425 self.total = total
1424 self.total = total
1426 self.pos = pos
1425 self.pos = pos
1427 self._print(item)
1426 self._print(item)
1428
1427
1429 def increment(self, step=1, item="", total=None):
1428 def increment(self, step=1, item="", total=None):
1430 self.update(self.pos + step, item, total)
1429 self.update(self.pos + step, item, total)
1431
1430
1432 def complete(self):
1431 def complete(self):
1433 self.ui.progress(self.topic, None)
1432 self.ui.progress(self.topic, None)
1434
1433
1435 def _print(self, item):
1434 def _print(self, item):
1436 self.ui.progress(self.topic, self.pos, item, self.unit,
1435 self.ui.progress(self.topic, self.pos, item, self.unit,
1437 self.total)
1436 self.total)
1438
1437
1439 def gdinitconfig(ui):
1438 def gdinitconfig(ui):
1440 """helper function to know if a repo should be created as general delta
1439 """helper function to know if a repo should be created as general delta
1441 """
1440 """
1442 # experimental config: format.generaldelta
1441 # experimental config: format.generaldelta
1443 return (ui.configbool('format', 'generaldelta')
1442 return (ui.configbool('format', 'generaldelta')
1444 or ui.configbool('format', 'usegeneraldelta')
1443 or ui.configbool('format', 'usegeneraldelta')
1445 or ui.configbool('format', 'sparse-revlog'))
1444 or ui.configbool('format', 'sparse-revlog'))
1446
1445
1447 def gddeltaconfig(ui):
1446 def gddeltaconfig(ui):
1448 """helper function to know if incoming delta should be optimised
1447 """helper function to know if incoming delta should be optimised
1449 """
1448 """
1450 # experimental config: format.generaldelta
1449 # experimental config: format.generaldelta
1451 return ui.configbool('format', 'generaldelta')
1450 return ui.configbool('format', 'generaldelta')
1452
1451
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # Reserved key under which read()/write() expose a free-form first
    # line; user data must never use this key.
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but unused here.
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key.

        Raises error.CorruptedState on an empty file (when a first line is
        expected), on a malformed key=value line, or when the reserved
        __firstline key appears in the data."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a line without '=' makes the dict() constructor blow up
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form

        Raises error.ProgrammingError when a key or value violates the
        constraints above."""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp ensures readers never observe a partially written file
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1521
1520
# Transaction names (matched by prefix) for which registersummarycallback
# will report the number of changesets obsoleted by the transaction.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]
1529
1528
# Transaction names (matched by prefix) for which registersummarycallback
# will report the range of new changesets and phase changes.
_reportnewcssource = [
    'pull',
    'unbundle',
]
1534
1533
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1547
1546
# a list of (repo, revs, match) prefetch functions
# Extensions add callables here; prefetchfiles() above invokes them all.
fileprefetchhooks = util.hooks()
1550
1549
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1553
1552
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Depending on 'txnname', registers post-close callbacks on 'otr' that
    report obsoleted changesets, new instabilities, new changesets and
    phase changes."""
    def txmatch(sources):
        # 'sources' entries are transaction-name prefixes (see the
        # _report*source lists above)
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # the numeric prefix keeps callbacks ordered by registration
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # count changesets obsoleted by this transaction
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing name, revset name) pairs
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken at registration time; the callback diffs against it
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                # no revision was added at all (even hidden ones)
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # only count pre-existing changesets moved to public
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1682
1681
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    # No message (None) when the instability count did not grow.
    if delta <= 0:
        return None
    return _('%i new %s changesets\n') % (delta, instability)
1690
1689
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of *nodes* as short hashes.

    When there are more than *maxnumnodes* entries (and the ui is not
    verbose), only the first *maxnumnodes* are listed followed by a
    "and N others" suffix."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1696
1695
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads

    Raises error.Abort naming the first offending branch found."""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1711
1710
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # Default implementation is the identity; extensions wrap this
    # function to interpose their own sink.
    return sink
1717
1716
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access is opt-in, and only makes sense on a visibility filter
    if not repo.filtername:
        return repo
    if not repo.ui.configbool('experimental', 'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    hashlikes = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        hashlikes.update(revsetlang.gethashlikesymbols(parsed))

    if not hashlikes:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlikes)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
1760
1759
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                # plausible revision number: either unhide it (when hidden
                # and revnum access is allowed) or drop it; never fall
                # through to hash-prefix resolution below
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        # in the unfiltered repo but not the filtered one:
                        # it is hidden
                        revs.add(n)
                    continue
        except ValueError:
            # not an integer: try it as a hash prefix below
            pass

        # an all-digit symbol larger than tiprev also reaches here and is
        # treated as a potential hash prefix
        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                # visible only in the unfiltered repo: hidden
                revs.add(rev)

    return revs
1794
1793
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    # everything reachable from the bookmark, minus what is reachable from
    # non-bookmarked heads or from other bookmarks
    expr = ("ancestors(bookmark(%s)) - "
            "ancestors(head() and not bookmark(%s)) - "
            "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(expr, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now