##// END OF EJS Templates
scmutil: delete now-unused origpath() (API)...
Martin von Zweigbergk -
r41753:e944cf4c default
parent child Browse files
Show More
@@ -1,1876 +1,1841 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 policy,
37 policy,
38 pycompat,
38 pycompat,
39 revsetlang,
39 revsetlang,
40 similar,
40 similar,
41 smartset,
41 smartset,
42 url,
42 url,
43 util,
43 util,
44 vfs,
44 vfs,
45 )
45 )
46
46
47 from .utils import (
47 from .utils import (
48 procutil,
48 procutil,
49 stringutil,
49 stringutil,
50 )
50 )
51
51
52 if pycompat.iswindows:
52 if pycompat.iswindows:
53 from . import scmwindows as scmplatform
53 from . import scmwindows as scmplatform
54 else:
54 else:
55 from . import scmposix as scmplatform
55 from . import scmposix as scmplatform
56
56
# Parsers module loaded according to the configured module policy
# (C extension vs. pure-Python implementation).
parsers = policy.importmod(r'parsers')

# Terminal-size probe from the platform module selected above
# (scmwindows on Windows, scmposix elsewhere).
termsize = scmplatform.termsize
60
60
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Stored positionally; the properties below give named access.
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = (r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                    r'unknown=%s, ignored=%s, clean=%s>')
        return template % tuple(
            pycompat.sysstr(stringutil.pprint(v)) for v in self)
114
114
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> ctx mapping, preferring entries from ctx1.  The
    # entries from ctx2 matter when the .hgsub file has been modified (in
    # ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    onlyin2 = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            onlyin2.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For paths present only in ctx2, yield an empty subrepo based on ctx1
    # so 'sub.{status|diff}(rev2)' produces an accurate result instead of
    # comparing the ctx2 subrepo against itself.
    for subpath in onlyin2:
        yield subpath, ctx2.nullsub(subpath, ctx1)
139
139
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Collect excluded changesets that are secret and still alive; they
    # explain to the user why nothing was exchanged.
    secretlist = []
    for n in (excluded or []):
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
156
156
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print a traceback (honoring --traceback) before dispatching
            # the exception to the specific handlers below.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        # Remote-side failure; inst.args (if any) carries the remote output.
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        # The payload can be unicode, bytes, or something else entirely;
        # normalize and render it defensively.
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        # User action is required rather than a hard failure: exit code 1,
        # not the generic -1 returned at the bottom.
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # Give a targeted hint when the missing module is one of our own
        # compiled extensions or a broken Python install.
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. the pager exited): silently ignore.
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code

    return -1
267
267
def checknewlabel(repo, lbl, kind):
    """Abort if 'lbl' is unusable as a new label name.

    The "kind" parameter is deliberately not used in ui output because it
    makes strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(forbidden))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
284
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                              % pycompat.bytestr(f))
290
290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    # Skip the (relatively expensive) Windows-name check entirely when the
    # configuration asks for neither a warning nor an abort.
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302
302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    # 'ui.portablefilenames' accepts a boolean or 'warn'/'abort'/'ignore';
    # on Windows non-portable filenames always abort.
    rawval = ui.config('ui', 'portablefilenames')
    lowered = rawval.lower()
    boolval = stringutil.parsebool(rawval)
    abort = pycompat.iswindows or lowered == 'abort'
    warn = boolval or lowered == 'warn'
    if boolval is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % rawval)
    return abort, warn
315
315
class casecollisionauditor(object):
    '''Warn or abort when a newly added file would collide case-insensitively
    with a tracked file.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # Remember filenames this auditor was already called with, so that
        # a second call with the same name is not reported as a collision
        # against itself.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
339
339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
363
363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def raise_at_top(err):
        # An error on the starting directory is fatal; deeper ones are
        # silently skipped by os.walk.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(statlist, dirname):
            # Record dirname's stat if unseen; True means "not seen before".
            st = os.stat(dirname)
            already = any(samestat(st, prior) for prior in statlist)
            if not already:
                statlist.append(st)
            return not already
    else:
        # Without os.path.samestat we cannot detect symlink cycles safely.
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True, onerror=raise_at_top):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            keep = []
            for d in dirs:
                fullpath = os.path.join(root, d)
                if adddir(seen_dirs, fullpath):
                    if os.path.islink(fullpath):
                        # Walk through the symlink with its own recursive
                        # call so seen_dirs catches cycles.
                        for hgname in walkrepos(fullpath, True, seen_dirs):
                            yield hgname
                    else:
                        keep.append(d)
            dirs[:] = keep
407
407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # A working-directory context has no real node; use the magic wdir id.
    node = ctx.node()
    return wdirid if node is None else node
414
414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory has no revision number; use the magic wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
422
422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
428
428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hash in debug mode, abbreviated hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436
436
def resolvehexnodeidprefix(repo, prefix):
    """Resolve a hex nodeid prefix to a full binary node.

    Returns None when nothing matches.  May raise
    error.AmbiguousPrefixLookupError when several nodes share the prefix
    and the 'revisions.disambiguatewithin' revset cannot narrow it to one.
    """
    # An 'x' prefix escapes hex strings that could otherwise be read as
    # revnums (opt-in via experimental.revisions.prefixhexnode).
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                # Exactly one candidate within the revset resolves the
                # ambiguity; otherwise re-raise for the caller.
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
465
465
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        num = int(prefix)
    except ValueError:
        return False
    # A leading zero can never be a revnum (except for '0' itself, which
    # *is* a valid revnum), and neither can anything beyond the tip rev.
    if prefix != b'0' and prefix[0:1] == b'0':
        return False
    return num < len(repo)
479
479
480 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
480 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
481 """Find the shortest unambiguous prefix that matches hexnode.
481 """Find the shortest unambiguous prefix that matches hexnode.
482
482
483 If "cache" is not None, it must be a dictionary that can be used for
483 If "cache" is not None, it must be a dictionary that can be used for
484 caching between calls to this method.
484 caching between calls to this method.
485 """
485 """
486 # _partialmatch() of filtered changelog could take O(len(repo)) time,
486 # _partialmatch() of filtered changelog could take O(len(repo)) time,
487 # which would be unacceptably slow. so we look for hash collision in
487 # which would be unacceptably slow. so we look for hash collision in
488 # unfiltered space, which means some hashes may be slightly longer.
488 # unfiltered space, which means some hashes may be slightly longer.
489
489
490 minlength=max(minlength, 1)
490 minlength=max(minlength, 1)
491
491
492 def disambiguate(prefix):
492 def disambiguate(prefix):
493 """Disambiguate against revnums."""
493 """Disambiguate against revnums."""
494 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
494 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
495 if mayberevnum(repo, prefix):
495 if mayberevnum(repo, prefix):
496 return 'x' + prefix
496 return 'x' + prefix
497 else:
497 else:
498 return prefix
498 return prefix
499
499
500 hexnode = hex(node)
500 hexnode = hex(node)
501 for length in range(len(prefix), len(hexnode) + 1):
501 for length in range(len(prefix), len(hexnode) + 1):
502 prefix = hexnode[:length]
502 prefix = hexnode[:length]
503 if not mayberevnum(repo, prefix):
503 if not mayberevnum(repo, prefix):
504 return prefix
504 return prefix
505
505
506 cl = repo.unfiltered().changelog
506 cl = repo.unfiltered().changelog
507 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
507 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
508 if revset:
508 if revset:
509 revs = None
509 revs = None
510 if cache is not None:
510 if cache is not None:
511 revs = cache.get('disambiguationrevset')
511 revs = cache.get('disambiguationrevset')
512 if revs is None:
512 if revs is None:
513 revs = repo.anyrevs([revset], user=True)
513 revs = repo.anyrevs([revset], user=True)
514 if cache is not None:
514 if cache is not None:
515 cache['disambiguationrevset'] = revs
515 cache['disambiguationrevset'] = revs
516 if cl.rev(node) in revs:
516 if cl.rev(node) in revs:
517 hexnode = hex(node)
517 hexnode = hex(node)
518 nodetree = None
518 nodetree = None
519 if cache is not None:
519 if cache is not None:
520 nodetree = cache.get('disambiguationnodetree')
520 nodetree = cache.get('disambiguationnodetree')
521 if not nodetree:
521 if not nodetree:
522 try:
522 try:
523 nodetree = parsers.nodetree(cl.index, len(revs))
523 nodetree = parsers.nodetree(cl.index, len(revs))
524 except AttributeError:
524 except AttributeError:
525 # no native nodetree
525 # no native nodetree
526 pass
526 pass
527 else:
527 else:
528 for r in revs:
528 for r in revs:
529 nodetree.insert(r)
529 nodetree.insert(r)
530 if cache is not None:
530 if cache is not None:
531 cache['disambiguationnodetree'] = nodetree
531 cache['disambiguationnodetree'] = nodetree
532 if nodetree is not None:
532 if nodetree is not None:
533 length = max(nodetree.shortest(node), minlength)
533 length = max(nodetree.shortest(node), minlength)
534 prefix = hexnode[:length]
534 prefix = hexnode[:length]
535 return disambiguate(prefix)
535 return disambiguate(prefix)
536 for length in range(minlength, len(hexnode) + 1):
536 for length in range(minlength, len(hexnode) + 1):
537 matches = []
537 matches = []
538 prefix = hexnode[:length]
538 prefix = hexnode[:length]
539 for rev in revs:
539 for rev in revs:
540 otherhexnode = repo[rev].hex()
540 otherhexnode = repo[rev].hex()
541 if prefix == otherhexnode[:length]:
541 if prefix == otherhexnode[:length]:
542 matches.append(otherhexnode)
542 matches.append(otherhexnode)
543 if len(matches) == 1:
543 if len(matches) == 1:
544 return disambiguate(prefix)
544 return disambiguate(prefix)
545
545
546 try:
546 try:
547 return disambiguate(cl.shortest(node, minlength))
547 return disambiguate(cl.shortest(node, minlength))
548 except error.LookupError:
548 except error.LookupError:
549 raise error.RepoLookupError()
549 raise error.RepoLookupError()
550
550
551 def isrevsymbol(repo, symbol):
551 def isrevsymbol(repo, symbol):
552 """Checks if a symbol exists in the repo.
552 """Checks if a symbol exists in the repo.
553
553
554 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
554 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
555 symbol is an ambiguous nodeid prefix.
555 symbol is an ambiguous nodeid prefix.
556 """
556 """
557 try:
557 try:
558 revsymbol(repo, symbol)
558 revsymbol(repo, symbol)
559 return True
559 return True
560 except error.RepoLookupError:
560 except error.RepoLookupError:
561 return False
561 return False
562
562
563 def revsymbol(repo, symbol):
563 def revsymbol(repo, symbol):
564 """Returns a context given a single revision symbol (as string).
564 """Returns a context given a single revision symbol (as string).
565
565
566 This is similar to revsingle(), but accepts only a single revision symbol,
566 This is similar to revsingle(), but accepts only a single revision symbol,
567 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
567 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
568 not "max(public())".
568 not "max(public())".
569 """
569 """
570 if not isinstance(symbol, bytes):
570 if not isinstance(symbol, bytes):
571 msg = ("symbol (%s of type %s) was not a string, did you mean "
571 msg = ("symbol (%s of type %s) was not a string, did you mean "
572 "repo[symbol]?" % (symbol, type(symbol)))
572 "repo[symbol]?" % (symbol, type(symbol)))
573 raise error.ProgrammingError(msg)
573 raise error.ProgrammingError(msg)
574 try:
574 try:
575 if symbol in ('.', 'tip', 'null'):
575 if symbol in ('.', 'tip', 'null'):
576 return repo[symbol]
576 return repo[symbol]
577
577
578 try:
578 try:
579 r = int(symbol)
579 r = int(symbol)
580 if '%d' % r != symbol:
580 if '%d' % r != symbol:
581 raise ValueError
581 raise ValueError
582 l = len(repo.changelog)
582 l = len(repo.changelog)
583 if r < 0:
583 if r < 0:
584 r += l
584 r += l
585 if r < 0 or r >= l and r != wdirrev:
585 if r < 0 or r >= l and r != wdirrev:
586 raise ValueError
586 raise ValueError
587 return repo[r]
587 return repo[r]
588 except error.FilteredIndexError:
588 except error.FilteredIndexError:
589 raise
589 raise
590 except (ValueError, OverflowError, IndexError):
590 except (ValueError, OverflowError, IndexError):
591 pass
591 pass
592
592
593 if len(symbol) == 40:
593 if len(symbol) == 40:
594 try:
594 try:
595 node = bin(symbol)
595 node = bin(symbol)
596 rev = repo.changelog.rev(node)
596 rev = repo.changelog.rev(node)
597 return repo[rev]
597 return repo[rev]
598 except error.FilteredLookupError:
598 except error.FilteredLookupError:
599 raise
599 raise
600 except (TypeError, LookupError):
600 except (TypeError, LookupError):
601 pass
601 pass
602
602
603 # look up bookmarks through the name interface
603 # look up bookmarks through the name interface
604 try:
604 try:
605 node = repo.names.singlenode(repo, symbol)
605 node = repo.names.singlenode(repo, symbol)
606 rev = repo.changelog.rev(node)
606 rev = repo.changelog.rev(node)
607 return repo[rev]
607 return repo[rev]
608 except KeyError:
608 except KeyError:
609 pass
609 pass
610
610
611 node = resolvehexnodeidprefix(repo, symbol)
611 node = resolvehexnodeidprefix(repo, symbol)
612 if node is not None:
612 if node is not None:
613 rev = repo.changelog.rev(node)
613 rev = repo.changelog.rev(node)
614 return repo[rev]
614 return repo[rev]
615
615
616 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
616 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
617
617
618 except error.WdirUnsupported:
618 except error.WdirUnsupported:
619 return repo[None]
619 return repo[None]
620 except (error.FilteredIndexError, error.FilteredLookupError,
620 except (error.FilteredIndexError, error.FilteredLookupError,
621 error.FilteredRepoLookupError):
621 error.FilteredRepoLookupError):
622 raise _filterederror(repo, symbol)
622 raise _filterederror(repo, symbol)
623
623
624 def _filterederror(repo, changeid):
624 def _filterederror(repo, changeid):
625 """build an exception to be raised about a filtered changeid
625 """build an exception to be raised about a filtered changeid
626
626
627 This is extracted in a function to help extensions (eg: evolve) to
627 This is extracted in a function to help extensions (eg: evolve) to
628 experiment with various message variants."""
628 experiment with various message variants."""
629 if repo.filtername.startswith('visible'):
629 if repo.filtername.startswith('visible'):
630
630
631 # Check if the changeset is obsolete
631 # Check if the changeset is obsolete
632 unfilteredrepo = repo.unfiltered()
632 unfilteredrepo = repo.unfiltered()
633 ctx = revsymbol(unfilteredrepo, changeid)
633 ctx = revsymbol(unfilteredrepo, changeid)
634
634
635 # If the changeset is obsolete, enrich the message with the reason
635 # If the changeset is obsolete, enrich the message with the reason
636 # that made this changeset not visible
636 # that made this changeset not visible
637 if ctx.obsolete():
637 if ctx.obsolete():
638 msg = obsutil._getfilteredreason(repo, changeid, ctx)
638 msg = obsutil._getfilteredreason(repo, changeid, ctx)
639 else:
639 else:
640 msg = _("hidden revision '%s'") % changeid
640 msg = _("hidden revision '%s'") % changeid
641
641
642 hint = _('use --hidden to access hidden revisions')
642 hint = _('use --hidden to access hidden revisions')
643
643
644 return error.FilteredRepoLookupError(msg, hint=hint)
644 return error.FilteredRepoLookupError(msg, hint=hint)
645 msg = _("filtered revision '%s' (not in '%s' subset)")
645 msg = _("filtered revision '%s' (not in '%s' subset)")
646 msg %= (changeid, repo.filtername)
646 msg %= (changeid, repo.filtername)
647 return error.FilteredRepoLookupError(msg)
647 return error.FilteredRepoLookupError(msg)
648
648
649 def revsingle(repo, revspec, default='.', localalias=None):
649 def revsingle(repo, revspec, default='.', localalias=None):
650 if not revspec and revspec != 0:
650 if not revspec and revspec != 0:
651 return repo[default]
651 return repo[default]
652
652
653 l = revrange(repo, [revspec], localalias=localalias)
653 l = revrange(repo, [revspec], localalias=localalias)
654 if not l:
654 if not l:
655 raise error.Abort(_('empty revision set'))
655 raise error.Abort(_('empty revision set'))
656 return repo[l.last()]
656 return repo[l.last()]
657
657
658 def _pairspec(revspec):
658 def _pairspec(revspec):
659 tree = revsetlang.parse(revspec)
659 tree = revsetlang.parse(revspec)
660 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
660 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
661
661
662 def revpair(repo, revs):
662 def revpair(repo, revs):
663 if not revs:
663 if not revs:
664 return repo['.'], repo[None]
664 return repo['.'], repo[None]
665
665
666 l = revrange(repo, revs)
666 l = revrange(repo, revs)
667
667
668 if not l:
668 if not l:
669 raise error.Abort(_('empty revision range'))
669 raise error.Abort(_('empty revision range'))
670
670
671 first = l.first()
671 first = l.first()
672 second = l.last()
672 second = l.last()
673
673
674 if (first == second and len(revs) >= 2
674 if (first == second and len(revs) >= 2
675 and not all(revrange(repo, [r]) for r in revs)):
675 and not all(revrange(repo, [r]) for r in revs)):
676 raise error.Abort(_('empty revision on one side of range'))
676 raise error.Abort(_('empty revision on one side of range'))
677
677
678 # if top-level is range expression, the result must always be a pair
678 # if top-level is range expression, the result must always be a pair
679 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
679 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
680 return repo[first], repo[None]
680 return repo[first], repo[None]
681
681
682 return repo[first], repo[second]
682 return repo[first], repo[second]
683
683
684 def revrange(repo, specs, localalias=None):
684 def revrange(repo, specs, localalias=None):
685 """Execute 1 to many revsets and return the union.
685 """Execute 1 to many revsets and return the union.
686
686
687 This is the preferred mechanism for executing revsets using user-specified
687 This is the preferred mechanism for executing revsets using user-specified
688 config options, such as revset aliases.
688 config options, such as revset aliases.
689
689
690 The revsets specified by ``specs`` will be executed via a chained ``OR``
690 The revsets specified by ``specs`` will be executed via a chained ``OR``
691 expression. If ``specs`` is empty, an empty result is returned.
691 expression. If ``specs`` is empty, an empty result is returned.
692
692
693 ``specs`` can contain integers, in which case they are assumed to be
693 ``specs`` can contain integers, in which case they are assumed to be
694 revision numbers.
694 revision numbers.
695
695
696 It is assumed the revsets are already formatted. If you have arguments
696 It is assumed the revsets are already formatted. If you have arguments
697 that need to be expanded in the revset, call ``revsetlang.formatspec()``
697 that need to be expanded in the revset, call ``revsetlang.formatspec()``
698 and pass the result as an element of ``specs``.
698 and pass the result as an element of ``specs``.
699
699
700 Specifying a single revset is allowed.
700 Specifying a single revset is allowed.
701
701
702 Returns a ``revset.abstractsmartset`` which is a list-like interface over
702 Returns a ``revset.abstractsmartset`` which is a list-like interface over
703 integer revisions.
703 integer revisions.
704 """
704 """
705 allspecs = []
705 allspecs = []
706 for spec in specs:
706 for spec in specs:
707 if isinstance(spec, int):
707 if isinstance(spec, int):
708 spec = revsetlang.formatspec('%d', spec)
708 spec = revsetlang.formatspec('%d', spec)
709 allspecs.append(spec)
709 allspecs.append(spec)
710 return repo.anyrevs(allspecs, user=True, localalias=localalias)
710 return repo.anyrevs(allspecs, user=True, localalias=localalias)
711
711
712 def meaningfulparents(repo, ctx):
712 def meaningfulparents(repo, ctx):
713 """Return list of meaningful (or all if debug) parentrevs for rev.
713 """Return list of meaningful (or all if debug) parentrevs for rev.
714
714
715 For merges (two non-nullrev revisions) both parents are meaningful.
715 For merges (two non-nullrev revisions) both parents are meaningful.
716 Otherwise the first parent revision is considered meaningful if it
716 Otherwise the first parent revision is considered meaningful if it
717 is not the preceding revision.
717 is not the preceding revision.
718 """
718 """
719 parents = ctx.parents()
719 parents = ctx.parents()
720 if len(parents) > 1:
720 if len(parents) > 1:
721 return parents
721 return parents
722 if repo.ui.debugflag:
722 if repo.ui.debugflag:
723 return [parents[0], repo[nullrev]]
723 return [parents[0], repo[nullrev]]
724 if parents[0].rev() >= intrev(ctx) - 1:
724 if parents[0].rev() >= intrev(ctx) - 1:
725 return []
725 return []
726 return parents
726 return parents
727
727
728 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
728 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
729 """Return a function that produced paths for presenting to the user.
729 """Return a function that produced paths for presenting to the user.
730
730
731 The returned function takes a repo-relative path and produces a path
731 The returned function takes a repo-relative path and produces a path
732 that can be presented in the UI.
732 that can be presented in the UI.
733
733
734 Depending on the value of ui.relative-paths, either a repo-relative or
734 Depending on the value of ui.relative-paths, either a repo-relative or
735 cwd-relative path will be produced.
735 cwd-relative path will be produced.
736
736
737 legacyrelativevalue is the value to use if ui.relative-paths=legacy
737 legacyrelativevalue is the value to use if ui.relative-paths=legacy
738
738
739 If forcerelativevalue is not None, then that value will be used regardless
739 If forcerelativevalue is not None, then that value will be used regardless
740 of what ui.relative-paths is set to.
740 of what ui.relative-paths is set to.
741 """
741 """
742 if forcerelativevalue is not None:
742 if forcerelativevalue is not None:
743 relative = forcerelativevalue
743 relative = forcerelativevalue
744 else:
744 else:
745 config = repo.ui.config('ui', 'relative-paths')
745 config = repo.ui.config('ui', 'relative-paths')
746 if config == 'legacy':
746 if config == 'legacy':
747 relative = legacyrelativevalue
747 relative = legacyrelativevalue
748 else:
748 else:
749 relative = stringutil.parsebool(config)
749 relative = stringutil.parsebool(config)
750 if relative is None:
750 if relative is None:
751 raise error.ConfigError(
751 raise error.ConfigError(
752 _("ui.relative-paths is not a boolean ('%s')") % config)
752 _("ui.relative-paths is not a boolean ('%s')") % config)
753
753
754 if relative:
754 if relative:
755 cwd = repo.getcwd()
755 cwd = repo.getcwd()
756 pathto = repo.pathto
756 pathto = repo.pathto
757 return lambda f: pathto(f, cwd)
757 return lambda f: pathto(f, cwd)
758 else:
758 else:
759 return lambda f: f
759 return lambda f: f
760
760
761 def expandpats(pats):
761 def expandpats(pats):
762 '''Expand bare globs when running on windows.
762 '''Expand bare globs when running on windows.
763 On posix we assume it already has already been done by sh.'''
763 On posix we assume it already has already been done by sh.'''
764 if not util.expandglobs:
764 if not util.expandglobs:
765 return list(pats)
765 return list(pats)
766 ret = []
766 ret = []
767 for kindpat in pats:
767 for kindpat in pats:
768 kind, pat = matchmod._patsplit(kindpat, None)
768 kind, pat = matchmod._patsplit(kindpat, None)
769 if kind is None:
769 if kind is None:
770 try:
770 try:
771 globbed = glob.glob(pat)
771 globbed = glob.glob(pat)
772 except re.error:
772 except re.error:
773 globbed = [pat]
773 globbed = [pat]
774 if globbed:
774 if globbed:
775 ret.extend(globbed)
775 ret.extend(globbed)
776 continue
776 continue
777 ret.append(kindpat)
777 ret.append(kindpat)
778 return ret
778 return ret
779
779
780 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
780 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
781 badfn=None):
781 badfn=None):
782 '''Return a matcher and the patterns that were used.
782 '''Return a matcher and the patterns that were used.
783 The matcher will warn about bad matches, unless an alternate badfn callback
783 The matcher will warn about bad matches, unless an alternate badfn callback
784 is provided.'''
784 is provided.'''
785 if pats == ("",):
785 if pats == ("",):
786 pats = []
786 pats = []
787 if opts is None:
787 if opts is None:
788 opts = {}
788 opts = {}
789 if not globbed and default == 'relpath':
789 if not globbed and default == 'relpath':
790 pats = expandpats(pats or [])
790 pats = expandpats(pats or [])
791
791
792 def bad(f, msg):
792 def bad(f, msg):
793 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
793 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
794
794
795 if badfn is None:
795 if badfn is None:
796 badfn = bad
796 badfn = bad
797
797
798 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
798 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
799 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
799 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
800
800
801 if m.always():
801 if m.always():
802 pats = []
802 pats = []
803 return m, pats
803 return m, pats
804
804
805 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
805 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
806 badfn=None):
806 badfn=None):
807 '''Return a matcher that will warn about bad matches.'''
807 '''Return a matcher that will warn about bad matches.'''
808 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
808 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
809
809
810 def matchall(repo):
810 def matchall(repo):
811 '''Return a matcher that will efficiently match everything.'''
811 '''Return a matcher that will efficiently match everything.'''
812 return matchmod.always(repo.root, repo.getcwd())
812 return matchmod.always(repo.root, repo.getcwd())
813
813
814 def matchfiles(repo, files, badfn=None):
814 def matchfiles(repo, files, badfn=None):
815 '''Return a matcher that will efficiently match exactly these files.'''
815 '''Return a matcher that will efficiently match exactly these files.'''
816 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
816 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
817
817
818 def parsefollowlinespattern(repo, rev, pat, msg):
818 def parsefollowlinespattern(repo, rev, pat, msg):
819 """Return a file name from `pat` pattern suitable for usage in followlines
819 """Return a file name from `pat` pattern suitable for usage in followlines
820 logic.
820 logic.
821 """
821 """
822 if not matchmod.patkind(pat):
822 if not matchmod.patkind(pat):
823 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
823 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
824 else:
824 else:
825 ctx = repo[rev]
825 ctx = repo[rev]
826 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
826 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
827 files = [f for f in ctx if m(f)]
827 files = [f for f in ctx if m(f)]
828 if len(files) != 1:
828 if len(files) != 1:
829 raise error.ParseError(msg)
829 raise error.ParseError(msg)
830 return files[0]
830 return files[0]
831
831
832 def getorigvfs(ui, repo):
832 def getorigvfs(ui, repo):
833 """return a vfs suitable to save 'orig' file
833 """return a vfs suitable to save 'orig' file
834
834
835 return None if no special directory is configured"""
835 return None if no special directory is configured"""
836 origbackuppath = ui.config('ui', 'origbackuppath')
836 origbackuppath = ui.config('ui', 'origbackuppath')
837 if not origbackuppath:
837 if not origbackuppath:
838 return None
838 return None
839 return vfs.vfs(repo.wvfs.join(origbackuppath))
839 return vfs.vfs(repo.wvfs.join(origbackuppath))
840
840
841 def backuppath(ui, repo, filepath):
841 def backuppath(ui, repo, filepath):
842 '''customize where working copy backup files (.orig files) are created
842 '''customize where working copy backup files (.orig files) are created
843
843
844 Fetch user defined path from config file: [ui] origbackuppath = <path>
844 Fetch user defined path from config file: [ui] origbackuppath = <path>
845 Fall back to default (filepath with .orig suffix) if not specified
845 Fall back to default (filepath with .orig suffix) if not specified
846
846
847 filepath is repo-relative
847 filepath is repo-relative
848
848
849 Returns an absolute path
849 Returns an absolute path
850 '''
850 '''
851 origvfs = getorigvfs(ui, repo)
851 origvfs = getorigvfs(ui, repo)
852 if origvfs is None:
852 if origvfs is None:
853 return repo.wjoin(filepath + ".orig")
853 return repo.wjoin(filepath + ".orig")
854
854
855 origbackupdir = origvfs.dirname(filepath)
855 origbackupdir = origvfs.dirname(filepath)
856 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
856 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
857 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
857 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
858
858
859 # Remove any files that conflict with the backup file's path
859 # Remove any files that conflict with the backup file's path
860 for f in reversed(list(util.finddirs(filepath))):
860 for f in reversed(list(util.finddirs(filepath))):
861 if origvfs.isfileorlink(f):
861 if origvfs.isfileorlink(f):
862 ui.note(_('removing conflicting file: %s\n')
862 ui.note(_('removing conflicting file: %s\n')
863 % origvfs.join(f))
863 % origvfs.join(f))
864 origvfs.unlink(f)
864 origvfs.unlink(f)
865 break
865 break
866
866
867 origvfs.makedirs(origbackupdir)
867 origvfs.makedirs(origbackupdir)
868
868
869 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
869 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
870 ui.note(_('removing conflicting directory: %s\n')
870 ui.note(_('removing conflicting directory: %s\n')
871 % origvfs.join(filepath))
871 % origvfs.join(filepath))
872 origvfs.rmtree(filepath, forcibly=True)
872 origvfs.rmtree(filepath, forcibly=True)
873
873
874 return origvfs.join(filepath)
874 return origvfs.join(filepath)
875
875
876 def origpath(ui, repo, filepath):
877 '''customize where .orig files are created
878
879 Fetch user defined path from config file: [ui] origbackuppath = <path>
880 Fall back to default (filepath with .orig suffix) if not specified
881 '''
882 origvfs = getorigvfs(ui, repo)
883 if origvfs is None:
884 return filepath + ".orig"
885
886 # Convert filepath from an absolute path into a path inside the repo.
887 filepathfromroot = util.normpath(os.path.relpath(filepath,
888 start=repo.root))
889
890 origbackupdir = origvfs.dirname(filepathfromroot)
891 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
892 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
893
894 # Remove any files that conflict with the backup file's path
895 for f in reversed(list(util.finddirs(filepathfromroot))):
896 if origvfs.isfileorlink(f):
897 ui.note(_('removing conflicting file: %s\n')
898 % origvfs.join(f))
899 origvfs.unlink(f)
900 break
901
902 origvfs.makedirs(origbackupdir)
903
904 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
905 ui.note(_('removing conflicting directory: %s\n')
906 % origvfs.join(filepathfromroot))
907 origvfs.rmtree(filepathfromroot, forcibly=True)
908
909 return origvfs.join(filepathfromroot)
910
911 class _containsnode(object):
876 class _containsnode(object):
912 """proxy __contains__(node) to container.__contains__ which accepts revs"""
877 """proxy __contains__(node) to container.__contains__ which accepts revs"""
913
878
914 def __init__(self, repo, revcontainer):
879 def __init__(self, repo, revcontainer):
915 self._torev = repo.changelog.rev
880 self._torev = repo.changelog.rev
916 self._revcontains = revcontainer.__contains__
881 self._revcontains = revcontainer.__contains__
917
882
918 def __contains__(self, node):
883 def __contains__(self, node):
919 return self._revcontains(self._torev(node))
884 return self._revcontains(self._torev(node))
920
885
921 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
886 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
922 fixphase=False, targetphase=None, backup=True):
887 fixphase=False, targetphase=None, backup=True):
923 """do common cleanups when old nodes are replaced by new nodes
888 """do common cleanups when old nodes are replaced by new nodes
924
889
925 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
890 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
926 (we might also want to move working directory parent in the future)
891 (we might also want to move working directory parent in the future)
927
892
928 By default, bookmark moves are calculated automatically from 'replacements',
893 By default, bookmark moves are calculated automatically from 'replacements',
929 but 'moves' can be used to override that. Also, 'moves' may include
894 but 'moves' can be used to override that. Also, 'moves' may include
930 additional bookmark moves that should not have associated obsmarkers.
895 additional bookmark moves that should not have associated obsmarkers.
931
896
932 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
897 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
933 have replacements. operation is a string, like "rebase".
898 have replacements. operation is a string, like "rebase".
934
899
935 metadata is dictionary containing metadata to be stored in obsmarker if
900 metadata is dictionary containing metadata to be stored in obsmarker if
936 obsolescence is enabled.
901 obsolescence is enabled.
937 """
902 """
938 assert fixphase or targetphase is None
903 assert fixphase or targetphase is None
939 if not replacements and not moves:
904 if not replacements and not moves:
940 return
905 return
941
906
942 # translate mapping's other forms
907 # translate mapping's other forms
943 if not util.safehasattr(replacements, 'items'):
908 if not util.safehasattr(replacements, 'items'):
944 replacements = {(n,): () for n in replacements}
909 replacements = {(n,): () for n in replacements}
945 else:
910 else:
946 # upgrading non tuple "source" to tuple ones for BC
911 # upgrading non tuple "source" to tuple ones for BC
947 repls = {}
912 repls = {}
948 for key, value in replacements.items():
913 for key, value in replacements.items():
949 if not isinstance(key, tuple):
914 if not isinstance(key, tuple):
950 key = (key,)
915 key = (key,)
951 repls[key] = value
916 repls[key] = value
952 replacements = repls
917 replacements = repls
953
918
954 # Unfiltered repo is needed since nodes in replacements might be hidden.
919 # Unfiltered repo is needed since nodes in replacements might be hidden.
955 unfi = repo.unfiltered()
920 unfi = repo.unfiltered()
956
921
957 # Calculate bookmark movements
922 # Calculate bookmark movements
958 if moves is None:
923 if moves is None:
959 moves = {}
924 moves = {}
960 for oldnodes, newnodes in replacements.items():
925 for oldnodes, newnodes in replacements.items():
961 for oldnode in oldnodes:
926 for oldnode in oldnodes:
962 if oldnode in moves:
927 if oldnode in moves:
963 continue
928 continue
964 if len(newnodes) > 1:
929 if len(newnodes) > 1:
965 # usually a split, take the one with biggest rev number
930 # usually a split, take the one with biggest rev number
966 newnode = next(unfi.set('max(%ln)', newnodes)).node()
931 newnode = next(unfi.set('max(%ln)', newnodes)).node()
967 elif len(newnodes) == 0:
932 elif len(newnodes) == 0:
968 # move bookmark backwards
933 # move bookmark backwards
969 allreplaced = []
934 allreplaced = []
970 for rep in replacements:
935 for rep in replacements:
971 allreplaced.extend(rep)
936 allreplaced.extend(rep)
972 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
937 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
973 allreplaced))
938 allreplaced))
974 if roots:
939 if roots:
975 newnode = roots[0].node()
940 newnode = roots[0].node()
976 else:
941 else:
977 newnode = nullid
942 newnode = nullid
978 else:
943 else:
979 newnode = newnodes[0]
944 newnode = newnodes[0]
980 moves[oldnode] = newnode
945 moves[oldnode] = newnode
981
946
982 allnewnodes = [n for ns in replacements.values() for n in ns]
947 allnewnodes = [n for ns in replacements.values() for n in ns]
983 toretract = {}
948 toretract = {}
984 toadvance = {}
949 toadvance = {}
985 if fixphase:
950 if fixphase:
986 precursors = {}
951 precursors = {}
987 for oldnodes, newnodes in replacements.items():
952 for oldnodes, newnodes in replacements.items():
988 for oldnode in oldnodes:
953 for oldnode in oldnodes:
989 for newnode in newnodes:
954 for newnode in newnodes:
990 precursors.setdefault(newnode, []).append(oldnode)
955 precursors.setdefault(newnode, []).append(oldnode)
991
956
992 allnewnodes.sort(key=lambda n: unfi[n].rev())
957 allnewnodes.sort(key=lambda n: unfi[n].rev())
993 newphases = {}
958 newphases = {}
994 def phase(ctx):
959 def phase(ctx):
995 return newphases.get(ctx.node(), ctx.phase())
960 return newphases.get(ctx.node(), ctx.phase())
996 for newnode in allnewnodes:
961 for newnode in allnewnodes:
997 ctx = unfi[newnode]
962 ctx = unfi[newnode]
998 parentphase = max(phase(p) for p in ctx.parents())
963 parentphase = max(phase(p) for p in ctx.parents())
999 if targetphase is None:
964 if targetphase is None:
1000 oldphase = max(unfi[oldnode].phase()
965 oldphase = max(unfi[oldnode].phase()
1001 for oldnode in precursors[newnode])
966 for oldnode in precursors[newnode])
1002 newphase = max(oldphase, parentphase)
967 newphase = max(oldphase, parentphase)
1003 else:
968 else:
1004 newphase = max(targetphase, parentphase)
969 newphase = max(targetphase, parentphase)
1005 newphases[newnode] = newphase
970 newphases[newnode] = newphase
1006 if newphase > ctx.phase():
971 if newphase > ctx.phase():
1007 toretract.setdefault(newphase, []).append(newnode)
972 toretract.setdefault(newphase, []).append(newnode)
1008 elif newphase < ctx.phase():
973 elif newphase < ctx.phase():
1009 toadvance.setdefault(newphase, []).append(newnode)
974 toadvance.setdefault(newphase, []).append(newnode)
1010
975
1011 with repo.transaction('cleanup') as tr:
976 with repo.transaction('cleanup') as tr:
1012 # Move bookmarks
977 # Move bookmarks
1013 bmarks = repo._bookmarks
978 bmarks = repo._bookmarks
1014 bmarkchanges = []
979 bmarkchanges = []
1015 for oldnode, newnode in moves.items():
980 for oldnode, newnode in moves.items():
1016 oldbmarks = repo.nodebookmarks(oldnode)
981 oldbmarks = repo.nodebookmarks(oldnode)
1017 if not oldbmarks:
982 if not oldbmarks:
1018 continue
983 continue
1019 from . import bookmarks # avoid import cycle
984 from . import bookmarks # avoid import cycle
1020 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
985 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1021 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
986 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1022 hex(oldnode), hex(newnode)))
987 hex(oldnode), hex(newnode)))
1023 # Delete divergent bookmarks being parents of related newnodes
988 # Delete divergent bookmarks being parents of related newnodes
1024 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
989 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1025 allnewnodes, newnode, oldnode)
990 allnewnodes, newnode, oldnode)
1026 deletenodes = _containsnode(repo, deleterevs)
991 deletenodes = _containsnode(repo, deleterevs)
1027 for name in oldbmarks:
992 for name in oldbmarks:
1028 bmarkchanges.append((name, newnode))
993 bmarkchanges.append((name, newnode))
1029 for b in bookmarks.divergent2delete(repo, deletenodes, name):
994 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1030 bmarkchanges.append((b, None))
995 bmarkchanges.append((b, None))
1031
996
1032 if bmarkchanges:
997 if bmarkchanges:
1033 bmarks.applychanges(repo, tr, bmarkchanges)
998 bmarks.applychanges(repo, tr, bmarkchanges)
1034
999
1035 for phase, nodes in toretract.items():
1000 for phase, nodes in toretract.items():
1036 phases.retractboundary(repo, tr, phase, nodes)
1001 phases.retractboundary(repo, tr, phase, nodes)
1037 for phase, nodes in toadvance.items():
1002 for phase, nodes in toadvance.items():
1038 phases.advanceboundary(repo, tr, phase, nodes)
1003 phases.advanceboundary(repo, tr, phase, nodes)
1039
1004
1040 # Obsolete or strip nodes
1005 # Obsolete or strip nodes
1041 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1006 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1042 # If a node is already obsoleted, and we want to obsolete it
1007 # If a node is already obsoleted, and we want to obsolete it
1043 # without a successor, skip that obssolete request since it's
1008 # without a successor, skip that obssolete request since it's
1044 # unnecessary. That's the "if s or not isobs(n)" check below.
1009 # unnecessary. That's the "if s or not isobs(n)" check below.
1045 # Also sort the node in topology order, that might be useful for
1010 # Also sort the node in topology order, that might be useful for
1046 # some obsstore logic.
1011 # some obsstore logic.
1047 # NOTE: the sorting might belong to createmarkers.
1012 # NOTE: the sorting might belong to createmarkers.
1048 torev = unfi.changelog.rev
1013 torev = unfi.changelog.rev
1049 sortfunc = lambda ns: torev(ns[0][0])
1014 sortfunc = lambda ns: torev(ns[0][0])
1050 rels = []
1015 rels = []
1051 for ns, s in sorted(replacements.items(), key=sortfunc):
1016 for ns, s in sorted(replacements.items(), key=sortfunc):
1052 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1017 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1053 rels.append(rel)
1018 rels.append(rel)
1054 if rels:
1019 if rels:
1055 obsolete.createmarkers(repo, rels, operation=operation,
1020 obsolete.createmarkers(repo, rels, operation=operation,
1056 metadata=metadata)
1021 metadata=metadata)
1057 else:
1022 else:
1058 from . import repair # avoid import cycle
1023 from . import repair # avoid import cycle
1059 tostrip = list(n for ns in replacements for n in ns)
1024 tostrip = list(n for ns in replacements for n in ns)
1060 if tostrip:
1025 if tostrip:
1061 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1026 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1062 backup=backup)
1027 backup=backup)
1063
1028
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and forget missing files, as selected by matcher.

    Honors opts 'dry_run', 'similarity' (0-100, for rename detection) and
    'subrepos'. Recurses into subrepositories whose paths are matched.
    Returns 1 if any explicitly requested file was rejected or a subrepo
    addremove failed, otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # findrenames() below expects a 0..1 ratio, not a percentage
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when --subrepos was given, or when the subrepo itself or
        # something inside it was explicitly matched
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only report files the user named explicitly; remember them so we
        # can signal failure at the end
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file that was explicitly requested is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
1125
1090
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # files to be added (unknown or forgotten) vs. removed (deleted)
        addset = set(unknown + forgotten)
        for fname in sorted(addset | set(deleted)):
            if fname in addset:
                repo.ui.status(_('adding %s\n') % fname)
            else:
                repo.ui.status(_('removing %s\n') % fname)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # fail if any file the caller named explicitly was rejected
    return 1 if any(f in m.files() for f in rejected) else 0
1154
1119
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        if state == '?':
            # untracked: a new file if the path passes the audit; an
            # untracked path with no stat data counts as deleted
            if audit_path.check(fname):
                unknown.append(fname)
            elif not st:
                deleted.append(fname)
        elif state == 'r':
            # removed in dirstate: still present on disk means it was
            # forgotten; gone from disk means a real removal (rename source)
            if st:
                forgotten.append(fname)
            else:
                removed.append(fname)
        elif not st:
            # tracked but missing from disk
            deleted.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
1184
1149
1185 def _findrenames(repo, matcher, added, removed, similarity):
1150 def _findrenames(repo, matcher, added, removed, similarity):
1186 '''Find renames from removed files to added ones.'''
1151 '''Find renames from removed files to added ones.'''
1187 renames = {}
1152 renames = {}
1188 if similarity > 0:
1153 if similarity > 0:
1189 for old, new, score in similar.findrenames(repo, added, removed,
1154 for old, new, score in similar.findrenames(repo, added, removed,
1190 similarity):
1155 similarity):
1191 if (repo.ui.verbose or not matcher.exact(old)
1156 if (repo.ui.verbose or not matcher.exact(old)
1192 or not matcher.exact(new)):
1157 or not matcher.exact(new)):
1193 repo.ui.status(_('recording removal of %s as rename to %s '
1158 repo.ui.status(_('recording removal of %s as rename to %s '
1194 '(%d%% similar)\n') %
1159 '(%d%% similar)\n') %
1195 (matcher.rel(old), matcher.rel(new),
1160 (matcher.rel(old), matcher.rel(new),
1196 score * 100))
1161 score * 100))
1197 renames[new] = old
1162 renames[new] = old
1198 return renames
1163 return renames
1199
1164
1200 def _markchanges(repo, unknown, deleted, renames):
1165 def _markchanges(repo, unknown, deleted, renames):
1201 '''Marks the files in unknown as added, the files in deleted as removed,
1166 '''Marks the files in unknown as added, the files in deleted as removed,
1202 and the files in renames as copied.'''
1167 and the files in renames as copied.'''
1203 wctx = repo[None]
1168 wctx = repo[None]
1204 with repo.wlock():
1169 with repo.wlock():
1205 wctx.forget(deleted)
1170 wctx.forget(deleted)
1206 wctx.add(unknown)
1171 wctx.add(unknown)
1207 for new, old in renames.iteritems():
1172 for new, old in renames.iteritems():
1208 wctx.copy(old, new)
1173 wctx.copy(old, new)
1209
1174
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # if src is itself a copy, chase it back to the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # dirstate states: 'm' = merged, 'n' = normal; anything else needs
        # its state refreshed (unless this is a dry run)
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        # 'a' = added: the source was never committed, so there is no
        # revision to record copy metadata against
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # '?' = untracked, 'r' = removed: just (re-)add the destination
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            # normal case: record dst as a copy of the original source
            wctx.copy(origsrc, dst)
1228
1193
def writerequires(opener, requirements):
    """Write the 'requires' file atomically, one requirement per line,
    in sorted order."""
    with opener('requires', 'w', atomictemp=True) as fp:
        fp.writelines("%s\n" % req for req in sorted(requirements))
1233
1198
class filecachesubentry(object):
    """Stat-based change tracking for a single file backing a filecache.

    The entry starts out either with a snapshot of the file's stat data
    (stat=True) or empty (stat=False); changed() compares a fresh stat
    against the stored snapshot.
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means we don't know yet whether the stat data is usable
        self._cacheable = None
        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        # re-snapshot the stat data, but only for cacheable files
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable
        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; other errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1288
1253
class filecacheentry(object):
    """A compound cache entry tracking one or more files, each through a
    filecachesubentry."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1305
1270
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        # relative paths of the file(s) whose stat data guards the cache
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function plus both the
        # native-str (sname) and bytes (name) forms of its name
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        # if the value were already in obj.__dict__, this non-data
        # descriptor would never have been invoked
        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # stash the value in the instance dict so later reads bypass this
        # descriptor entirely
        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x
1390
1355
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # only the first space separates the key from the value
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # drain and reap the child (if any) before inspecting returncode,
        # and close whichever stream we opened
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1447
1412
1448 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1413 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1449 if lock is None:
1414 if lock is None:
1450 raise error.LockInheritanceContractViolation(
1415 raise error.LockInheritanceContractViolation(
1451 'lock can only be inherited while held')
1416 'lock can only be inherited while held')
1452 if environ is None:
1417 if environ is None:
1453 environ = {}
1418 environ = {}
1454 with lock.inherit() as locker:
1419 with lock.inherit() as locker:
1455 environ[envvar] = locker
1420 environ[envvar] = locker
1456 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1421 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1457
1422
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1466
1431
class progress(object):
    """Context-manager helper for driving a progress bar.

    Wraps a low-level ``updatebar(topic, pos, item, unit, total)`` callback
    and tracks the current position. When the ``progress.debug`` config
    option is set, every update is also echoed to the debug output.
    """

    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # always clear the bar, even when the body raised
        self.complete()

    def update(self, pos, item="", total=None):
        """Move the bar to absolute position pos, optionally updating the
        expected total."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        """Advance the bar by step relative to the current position."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Mark the topic as finished; pos=None tells the bar to clear."""
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # Fix: 'unit' must be bound unconditionally. It was previously only
        # assigned when self.unit was non-empty, so any unitless progress
        # bar raised UnboundLocalError in the debug output below.
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1513
1478
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    # either knob turns generaldelta on at repo creation time
    return any(ui.configbool('format', name)
               for name in ('generaldelta', 'usegeneraldelta'))
1520
1485
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    optimise = ui.configbool('format', 'generaldelta')
    return optimise
1526
1491
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved key under which read() reports a non-key-value first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # NOTE: 'keys' is accepted but not used by this implementation
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # the recorded first line does not keep its trailing '\n'
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # 'line.strip()' drops lines holding only a '\n', which a bare
            # 'if line' test would keep
            pairs = [line[:-1].split('=', 1) for line in lines
                     if line.strip()]
            updatedict = dict(pairs)
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a line without '=' makes dict() raise ValueError
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = []
        if firstline is not None:
            out.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1595
1560
# transaction-name prefixes (matched by registersummarycallback's txmatch)
# for which the count of changesets obsoleted in the transaction is reported
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction-name prefixes for which the range of newly added changesets
# and phase changes are reported
_reportnewcssource = [
    'pull',
    'unbundle',
]
1608
1573
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1621
1586
# a list of (repo, revs, match) prefetch functions, invoked by
# prefetchfiles() above; extensions add their callbacks here
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
# (registersummarycallback reports instability counts itself)
_reportstroubledchangesets = True
1627
1592
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Depending on 'txnname', hooks are attached to the transaction 'otr' that
    report obsoleted changesets, new instabilities, newly visible changesets
    and phase changes once the transaction closes.
    """
    def txmatch(sources):
        # a transaction is selected when its name starts with any listed prefix
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            # deliberately shadows the outer 'repo' with a fresh, refiltered
            # view obtained through the weakref
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # zero-padded counter keeps the callbacks firing in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # count of changesets obsoleted within this transaction
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing name, revset name) pairs
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unfiltered instable revisions per category
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken now, compared against the post-transaction state
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # only changesets that existed before the transaction and became
            # public count as "published"
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    if delta <= 0:
        # no net increase: no warning (returns None)
        return None
    return _('%i new %s changesets\n') % (delta, instability)
1764
1729
def nodesummaries(repo, nodes, maxnumnodes=4):
    """summarize 'nodes' as short hashes, abbreviating long lists unless the
    ui is verbose"""
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1770
1735
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1785
1750
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation: identity; extensions are expected to wrap this
    # function to decorate the sink
    return sink
1791
1756
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername:
        return repo
    if not repo.ui.configbool('experimental', 'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-looking symbol from the parsed revsets
    symbols = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(parsed))

    if not symbols:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, symbols)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join(pycompat.bytestr(unfi[r]) for r in hiddenrevs)
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # a dedicated filter name keeps the branch/tags caches separate until
    # those caches can be disabled when revisions are dynamically pinned
    return repo.filtered('visible-hidden', hiddenrevs)
1834
1799
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    # NOTE(review): len(unficl) is one past the largest valid rev, and the
    # 'n <= tiprev' test below therefore also treats n == len(unficl) as a
    # revision number — confirm this boundary is intended
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            # all-digit symbols small enough may be plain revision numbers;
            # larger ones fall through to hash-prefix resolution below
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    # revnum-based direct access disabled: skip this symbol
                    continue
                else:
                    # hidden iff present unfiltered but absent filtered
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            # resolve the symbol as a hex node-id prefix on the unfiltered repo
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            # unknown prefix or working-directory id: not a hidden changeset
            s = None

        if s is not None:
            rev = unficl.rev(s)
            # only revisions missing from the filtered changelog are "hidden"
            if rev not in cl:
                revs.add(rev)

    return revs
1868
1833
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    expr = ("ancestors(bookmark(%s)) - "
            "ancestors(head() and not bookmark(%s)) - "
            "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(expr, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now