addremove: add labels for messages about added and removed files...
Boris Feld - r39123:ad88726d default
@@ -1,1753 +1,1755 b''
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    pycompat,
    revsetlang,
    similar,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

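# Illustrative use of the status tuple above (hypothetical file lists, not
# part of this module): it behaves both as a named container and as a plain
# 7-tuple.
#
#   st = status(['a.txt'], ['b.txt'], [], [], [], [], [])
#   st.modified  -> ['a.txt']
#   st.added     -> ['b.txt']
#   len(st)      -> 7
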
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull. excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1

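# Illustrative use of callcatch (hypothetical command body): run a function
# and translate known failures into exit codes instead of tracebacks.
#
#   def _run():
#       ...                       # may raise error.Abort, IOError, etc.
#   ret = callcatch(ui, _run)     # func()'s result on success; 1 for
#                                 # InterventionRequired, -1 for most errors
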
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

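# Sketch of the cache key computed above (hypothetical revision numbers):
# if revs 5 and 7 are filtered from the view, the key is sha1(b'5;7;'), so
# any change to the filtered set changes the key even when tiprev/tipnode
# do not.
#
#   hashlib.sha1(b'5;7;').digest()   # == filteredhash(...) for such a view
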
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

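# Illustrative walk (hypothetical layout): given /src/a/.hg and
# /src/a/sub/b/.hg, list(walkrepos('/src')) yields '/src/a' only, while
# list(walkrepos('/src', recurse=True)) also descends into the working
# directory and yields '/src/a/sub/b'.
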
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

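# Example output (hypothetical values): formatrevnode(ui, 5, node) returns
# '5:abcdef123456' (short 12-digit hash) normally, and the full 40-digit
# hex form when --debug is in effect.
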
def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev
        if prefix[0:1] == b'0' or i > len(repo):
            return False
        return True
    except ValueError:
        return False

def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

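# Sketch (hypothetical hashes): if 'abc123...' and 'abd456...' are the only
# nodes sharing the prefix 'ab', shortesthexnodeidprefix() needs at least
# 'abc' to be unambiguous. With experimental.revisions.prefixhexnode
# enabled, a prefix that could be mistaken for an existing revision number
# (e.g. '1234') is returned as 'x1234' instead.
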
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

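# Illustrative calls (hypothetical repo): revsymbol(repo, b'.'),
# revsymbol(repo, b'tip') and revsymbol(repo, b'1234') each return a
# changectx, while revsymbol(repo, b'max(public())') raises
# RepoLookupError because revset expressions are not evaluated here.
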
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

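# Illustrative call (hypothetical revsets): revrange(repo, [b'::.', b'tip'])
# evaluates the union '(::.) or tip' with the user's revset aliases enabled
# and returns a smartset of integer revisions.
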
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

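# Illustrative matcher use (hypothetical paths):
#
#   m = matchfiles(repo, [b'a.txt', b'dir/b.txt'])
#   m(b'a.txt')   -> True
#   m(b'c.txt')   -> False
#   matchall(repo)(f) is True for any path f in the working directory.
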
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

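# Example (hypothetical config and paths): with "[ui] origbackuppath =
# .hg/origbackups" set, origpath(ui, repo, '/repo/d/f') returns a path
# under <repo>/.hg/origbackups/d/f rather than the default '/repo/d/f.orig'.
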
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

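# Sketch (hypothetical revs): wrapping a rev container lets node-keyed code
# test membership directly.
#
#   c = _containsnode(repo, {0, 1})
#   repo[0].node() in c   -> True
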
834 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
834 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
835 fixphase=False, targetphase=None, backup=True):
835 fixphase=False, targetphase=None, backup=True):
836 """do common cleanups when old nodes are replaced by new nodes
836 """do common cleanups when old nodes are replaced by new nodes
837
837
838 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
838 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
839 (we might also want to move working directory parent in the future)
839 (we might also want to move working directory parent in the future)
840
840
841 By default, bookmark moves are calculated automatically from 'replacements',
841 By default, bookmark moves are calculated automatically from 'replacements',
842 but 'moves' can be used to override that. Also, 'moves' may include
842 but 'moves' can be used to override that. Also, 'moves' may include
843 additional bookmark moves that should not have associated obsmarkers.
843 additional bookmark moves that should not have associated obsmarkers.
844
844
845 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
845 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
846 have replacements. operation is a string, like "rebase".
846 have replacements. operation is a string, like "rebase".
847
847
848 metadata is dictionary containing metadata to be stored in obsmarker if
848 metadata is dictionary containing metadata to be stored in obsmarker if
849 obsolescence is enabled.
849 obsolescence is enabled.
850 """
850 """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnode, newnodes in replacements.items():
            for newnode in newnodes:
                precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

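# Illustrative sketch (not part of the original file): how a history-editing
# command might call cleanupnodes() after rewriting one changeset into
# another. The helper name and the 'oldctx'/'newctx' arguments are
# hypothetical; cleanupnodes() itself is the real API defined above.
def _example_cleanupnodes(repo, oldctx, newctx):
    # map the replaced node to a one-element tuple of its successor;
    # fixphase=True also moves phase boundaries as computed above
    replacements = {oldctx.node(): (newctx.node(),)}
    cleanupnodes(repo, replacements, operation='amend', fixphase=True)
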
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

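# Note on the labels introduced above (a sketch, not part of the original
# change): ui.status(..., label=...) lets output machinery such as the color
# extension style these messages. A user could, for instance, colorize them
# from an hgrc; the color values below are made up for illustration:
#
#   [color]
#   addremove.added = green
#   addremove.removed = red
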
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative
    to the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

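# Illustrative sketch (not part of the original file): consuming
# _interestingfiles() directly. The dirstate codes checked above are
# '?' (unknown), 'r' (removed) and 'a' (added); 'st' is the walk's stat
# result, so "dstate != 'r' and not st" means a tracked file that has
# disappeared from disk. The helper name below is hypothetical.
def _example_interestingfiles(repo):
    m = matchall(repo)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    # 'unknown' are candidates for hg add, 'deleted' for hg forget
    return unknown, deleted
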
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons, it might not end up with dst marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            if not r or not r[0:1].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements

def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

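# Illustrative sketch (not part of the original file): round-tripping the
# requires file through the two helpers above. 'repo.vfs' is the opener for
# .hg/; the 'supported' set here is abbreviated for illustration.
def _example_requires(repo):
    supported = {'revlogv1', 'store', 'fncache', 'dotencode'}
    requirements = readrequires(repo.vfs, supported)  # aborts on unknown
    writerequires(repo.vfs, requirements)             # one entry per line
    return requirements
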
class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the
    new value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the
    function needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this
    case, entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the
    cached method result as well as possibly calling
    ``del obj._filecache[attr]`` to remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)

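# Illustrative sketch (not part of the original file): the usual pattern is a
# small subclass whose join() resolves paths against a repo-like object,
# mirroring what localrepo does. All class and attribute names below are
# hypothetical.
class _examplerepofilecache(filecache):
    def join(self, obj, fname):
        # resolve the tracked file relative to the object's .hg/ vfs
        return obj.vfs.join(fname)

class _examplerepo(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}

    @_examplerepofilecache('bookmarks')
    def bookmarks(self):
        # re-read .hg/bookmarks only when its stat() result changes
        return self.vfs.tryread('bookmarks')
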
def extdatasource(repo, source):
    """Gather a map of rev -> value from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

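# Illustrative sketch (not part of the original file): an [extdata] source
# and how extdatasource() consumes it. The source name 'bugzilla' and the
# shell command are made up for illustration:
#
#   [extdata]
#   bugzilla = shell:cat .hg/bugzilla-ids
#
# where each line of the file looks like "<node> <freeform value>". Then:
def _example_extdata(repo):
    return extdatasource(repo, 'bugzilla')  # {rev: value}
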
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)

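# Illustrative sketch (not part of the original file): the context-manager
# form guarantees the progress bar is cleared even if the loop raises. The
# topic and unit strings below are made up.
def _example_progress(ui, items):
    with progress(ui, _('examining'), unit=_('files'),
                  total=len(items)) as prog:
        for item in items:
            prog.increment(item=item)
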
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta')
            or ui.configbool('format', 'sparse-revlog'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumeric and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumeric and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

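# Illustrative sketch (not part of the original file): writing and reading a
# state file with simplekeyvaluefile. The path 'examplestate', the keys and
# the first line are hypothetical; 'repo.vfs' points at .hg/.
def _example_keyvalue(repo):
    f = simplekeyvaluefile(repo.vfs, 'examplestate')
    f.write({'version': '1', 'node': 'abc123'}, firstline='v1')
    # returns {'__firstline': 'v1', 'version': '1', 'node': 'abc123'}
    return f.read(firstlinenonkeyval=True)
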
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

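# Illustrative sketch (not part of the original file): how an extension might
# register a prefetch function on the hook point above. The extension name
# and the hook body are hypothetical; util.hooks' add() interface is real.
def _exampleprefetch(repo, revs, match):
    # e.g. batch-download the matched file contents for 'revs' from a
    # remote store before the command walks them
    pass

# registration, typically done at extension setup time:
# fileprefetchhooks.add('myextension', _exampleprefetch)
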
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev not in newrevs
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally
    loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision
    number is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags cache until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

1712 def _getrevsfromsymbols(repo, symbols):
1714 def _getrevsfromsymbols(repo, symbols):
1713 """parse the list of symbols and returns a set of revision numbers of hidden
1715 """parse the list of symbols and returns a set of revision numbers of hidden
1714 changesets present in symbols"""
1716 changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

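# Worked example for _getrevsfromsymbols (reviewer sketch, assuming an
# unfiltered changelog of 5 revisions in which rev 3 is hidden): the symbol
# b'3' only yields {3} when "experimental.directaccess.revnums" is enabled,
# while a hex prefix of the hidden node is resolved through
# resolvehexnodeidprefix() and yields its revision regardless of that knob.
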
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
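
A usage sketch for bookmarkrevs() (reviewer example; the command name and
wrapper below are hypothetical, only scmutil.bookmarkrevs itself comes from
the code above):

    # hypothetical extension command listing revisions "owned" by a bookmark
    from mercurial import registrar, scmutil

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command(b'bookmarkrevs', [], b'NAME')
    def bookmarkrevscmd(ui, repo, mark):
        # ancestors of the bookmark, minus history shared with other heads
        # and bookmarks, i.e. what the revset in bookmarkrevs() selects
        for rev in scmutil.bookmarkrevs(repo, mark):
            ui.write(b'%d\n' % rev)
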
@@ -1,94 +1,100 @@
 $ hg init rep
 $ cd rep
 $ mkdir dir
 $ touch foo dir/bar
 $ hg -v addremove
 adding dir/bar
 adding foo
 $ hg -v commit -m "add 1"
 committing files:
 dir/bar
 foo
 committing manifest
 committing changelog
 committed changeset 0:6f7f953567a2
 $ cd dir/
 $ touch ../foo_2 bar_2
 $ hg -v addremove
 adding dir/bar_2
 adding foo_2
 $ hg -v commit -m "add 2"
 committing files:
 dir/bar_2
 foo_2
 committing manifest
 committing changelog
 committed changeset 1:e65414bf35c5
 $ cd ..
 $ hg forget foo
 $ hg -v addremove
 adding foo
 $ hg forget foo

 $ hg -v addremove nonexistent
 nonexistent: $ENOENT$
 [1]

 $ cd ..

 $ hg init subdir
 $ cd subdir
 $ mkdir dir
 $ cd dir
 $ touch a.py
 $ hg addremove 'glob:*.py'
 adding a.py
 $ hg forget a.py
 $ hg addremove -I 'glob:*.py'
 adding a.py
 $ hg forget a.py
 $ hg addremove
 adding dir/a.py
 $ cd ..

 $ hg init sim
 $ cd sim
 $ echo a > a
 $ echo a >> a
 $ echo a >> a
 $ echo c > c
 $ hg commit -Ama
 adding a
 adding c
 $ mv a b
 $ rm c
 $ echo d > d
 $ hg addremove -n -s 50 # issue 1696
 removing a
 adding b
 removing c
 adding d
 recording removal of a as rename to b (100% similar)
+$ hg addremove -ns 50 --color debug
+[addremove.removed ui.status|removing a]
+[addremove.added ui.status|adding b]
+[addremove.removed ui.status|removing c]
+[addremove.added ui.status|adding d]
+[ ui.status|recording removal of a as rename to b (100% similar)]
 $ hg addremove -s 50
 removing a
 adding b
 removing c
 adding d
 recording removal of a as rename to b (100% similar)
 $ hg commit -mb
 $ cp b c
 $ hg forget b
 $ hg addremove -s 50
 adding b
 adding c

 $ rm c

 $ hg ci -A -m "c" nonexistent
 nonexistent: $ENOENT$
 abort: failed to mark all new/missing files as added/removed
 [255]

 $ hg st
 ! c
 $ cd ..
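
The --color debug block added above is what exercises the new output labels:
each addremove message is now wrapped in 'addremove.added' or
'addremove.removed' on top of the generic 'ui.status' label. A minimal sketch
of emitting such labelled messages (illustrative only; the real call sites
live in scmutil.addremove() and are not shown in this hunk):

    # illustrative helper, not the actual patch
    from mercurial.i18n import _

    def _printaddremove(ui, added, removed):
        for f in sorted(added):
            # renders as [addremove.added ui.status|adding f] in debug color
            ui.status(_(b'adding %s\n') % f, label=b'addremove.added')
        for f in sorted(removed):
            ui.status(_(b'removing %s\n') % f, label=b'addremove.removed')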