errors: use InputError for errors about bad paths...
Martin von Zweigbergk
r46448:3175b0e0 default
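This change makes the path-validation helpers checkfilename() and checkportable() raise error.InputError instead of the generic error.Abort, so that callcatch() (visible further down in this diff) can map bad-path errors to the detailed exit code 10. Below is a minimal sketch of the resulting pattern, assuming a Mercurial build that already provides error.InputError (it is referenced by callcatch() in this file); the helper name is hypothetical and simply mirrors the checkfilename() hunk in the diff:

    from mercurial import error, pycompat
    from mercurial.i18n import _

    def checkfilename_sketch(f):
        # Mirrors scmutil.checkfilename() after this commit: a bad path
        # is reported as an InputError (detailed exit code 10 through
        # callcatch) rather than a plain Abort.
        if b'\r' in f or b'\n' in f:
            raise error.InputError(
                _(b"'\\n' and '\\r' disallowed in filenames: %r")
                % pycompat.bytestr(f)
            )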
@@ -1,2308 +1,2308 b''
1 # scmutil.py - Mercurial core utility functions
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 from __future__ import absolute_import
9
10 import errno
11 import glob
12 import os
13 import posixpath
14 import re
15 import subprocess
16 import weakref
17
18 from .i18n import _
19 from .node import (
20 bin,
21 hex,
22 nullid,
23 nullrev,
24 short,
25 wdirid,
26 wdirrev,
27 )
28 from .pycompat import getattr
29 from .thirdparty import attr
30 from . import (
31 copies as copiesmod,
32 encoding,
33 error,
34 match as matchmod,
35 obsolete,
36 obsutil,
37 pathutil,
38 phases,
39 policy,
40 pycompat,
41 requirements as requirementsmod,
42 revsetlang,
43 similar,
44 smartset,
45 url,
46 util,
47 vfs,
48 )
49
50 from .utils import (
51 hashutil,
52 procutil,
53 stringutil,
54 )
55
56 if pycompat.iswindows:
57 from . import scmwindows as scmplatform
58 else:
59 from . import scmposix as scmplatform
60
61 parsers = policy.importmod('parsers')
62 rustrevlog = policy.importrust('revlog')
63
64 termsize = scmplatform.termsize
65
66
67 @attr.s(slots=True, repr=False)
68 class status(object):
69 '''Struct with a list of files per status.
70
71 The 'deleted', 'unknown' and 'ignored' properties are only
72 relevant to the working copy.
73 '''
74
75 modified = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
77 removed = attr.ib(default=attr.Factory(list))
78 deleted = attr.ib(default=attr.Factory(list))
79 unknown = attr.ib(default=attr.Factory(list))
80 ignored = attr.ib(default=attr.Factory(list))
81 clean = attr.ib(default=attr.Factory(list))
82
83 def __iter__(self):
84 yield self.modified
85 yield self.added
86 yield self.removed
87 yield self.deleted
88 yield self.unknown
89 yield self.ignored
90 yield self.clean
91
92 def __repr__(self):
93 return (
94 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
95 r'unknown=%s, ignored=%s, clean=%s>'
96 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
97
98
99 def itersubrepos(ctx1, ctx2):
100 """find subrepos in ctx1 or ctx2"""
101 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 # has been modified (in ctx2) but not yet committed (in ctx1).
104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106
107 missing = set()
108
109 for subpath in ctx2.substate:
110 if subpath not in ctx1.substate:
111 del subpaths[subpath]
112 missing.add(subpath)
113
114 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
115 yield subpath, ctx.sub(subpath)
116
117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 # status and diff will have an accurate result when it does
119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 # against itself.
121 for subpath in missing:
122 yield subpath, ctx2.nullsub(subpath, ctx1)
123
124
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull, excluded is None or a list of
127 nodes excluded from the push/pull.
128 '''
129 secretlist = []
130 if excluded:
131 for n in excluded:
132 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
135
136 if secretlist:
137 ui.status(
138 _(b"no changes found (ignored %d secret changesets)\n")
139 % len(secretlist)
140 )
141 else:
142 ui.status(_(b"no changes found\n"))
143
144
145 def callcatch(ui, func):
146 """call func() with global exception handling
147
148 return func() if no exception happens. otherwise do some error handling
149 and return an exit code accordingly. does not handle all exceptions.
150 """
151 coarse_exit_code = -1
152 detailed_exit_code = -1
153 try:
154 try:
155 return func()
156 except: # re-raises
157 ui.traceback()
158 raise
159 # Global exception handling, alphabetically
160 # Mercurial-specific first, followed by built-in and library exceptions
161 except error.LockHeld as inst:
162 detailed_exit_code = 20
163 if inst.errno == errno.ETIMEDOUT:
164 reason = _(b'timed out waiting for lock held by %r') % (
165 pycompat.bytestr(inst.locker)
166 )
167 else:
168 reason = _(b'lock held by %r') % inst.locker
169 ui.error(
170 _(b"abort: %s: %s\n")
171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
172 )
173 if not inst.locker:
174 ui.error(_(b"(lock might be very busy)\n"))
175 except error.LockUnavailable as inst:
176 detailed_exit_code = 20
177 ui.error(
178 _(b"abort: could not lock %s: %s\n")
179 % (
180 inst.desc or stringutil.forcebytestr(inst.filename),
181 encoding.strtolocal(inst.strerror),
182 )
183 )
184 except error.OutOfBandError as inst:
185 detailed_exit_code = 100
186 if inst.args:
187 msg = _(b"abort: remote error:\n")
188 else:
189 msg = _(b"abort: remote error\n")
190 ui.error(msg)
191 if inst.args:
192 ui.error(b''.join(inst.args))
193 if inst.hint:
194 ui.error(b'(%s)\n' % inst.hint)
195 except error.RepoError as inst:
196 ui.error(_(b"abort: %s!\n") % inst)
197 if inst.hint:
198 ui.error(_(b"(%s)\n") % inst.hint)
199 except error.ResponseError as inst:
200 ui.error(_(b"abort: %s") % inst.args[0])
201 msg = inst.args[1]
202 if isinstance(msg, type(u'')):
203 msg = pycompat.sysbytes(msg)
204 if not isinstance(msg, bytes):
205 ui.error(b" %r\n" % (msg,))
206 elif not msg:
207 ui.error(_(b" empty string\n"))
208 else:
209 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
210 except error.CensoredNodeError as inst:
211 ui.error(_(b"abort: file censored %s!\n") % inst)
212 except error.StorageError as inst:
213 ui.error(_(b"abort: %s!\n") % inst)
214 if inst.hint:
215 ui.error(_(b"(%s)\n") % inst.hint)
216 except error.InterventionRequired as inst:
217 ui.error(b"%s\n" % inst)
218 if inst.hint:
219 ui.error(_(b"(%s)\n") % inst.hint)
220 detailed_exit_code = 240
221 coarse_exit_code = 1
222 except error.WdirUnsupported:
223 ui.error(_(b"abort: working directory revision cannot be specified\n"))
224 except error.Abort as inst:
225 if isinstance(inst, error.InputError):
226 detailed_exit_code = 10
227 elif isinstance(inst, error.StateError):
228 detailed_exit_code = 20
229 elif isinstance(inst, error.ConfigError):
230 detailed_exit_code = 30
231 ui.error(_(b"abort: %s\n") % inst.message)
232 if inst.hint:
233 ui.error(_(b"(%s)\n") % inst.hint)
234 except error.WorkerError as inst:
235 # Don't print a message -- the worker already should have
236 return inst.status_code
237 except ImportError as inst:
238 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
239 m = stringutil.forcebytestr(inst).split()[-1]
240 if m in b"mpatch bdiff".split():
241 ui.error(_(b"(did you forget to compile extensions?)\n"))
242 elif m in b"zlib".split():
243 ui.error(_(b"(is your Python install correct?)\n"))
244 except util.urlerr.httperror as inst:
245 detailed_exit_code = 100
246 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
247 except util.urlerr.urlerror as inst:
248 detailed_exit_code = 100
249 try: # usually it is in the form (errno, strerror)
250 reason = inst.reason.args[1]
251 except (AttributeError, IndexError):
252 # it might be anything, for example a string
253 reason = inst.reason
254 if isinstance(reason, pycompat.unicode):
255 # SSLError of Python 2.7.9 contains a unicode
256 reason = encoding.unitolocal(reason)
257 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
258 except (IOError, OSError) as inst:
259 if (
260 util.safehasattr(inst, b"args")
261 and inst.args
262 and inst.args[0] == errno.EPIPE
263 ):
264 pass
265 elif getattr(inst, "strerror", None): # common IOError or OSError
266 if getattr(inst, "filename", None) is not None:
267 ui.error(
268 _(b"abort: %s: '%s'\n")
269 % (
270 encoding.strtolocal(inst.strerror),
271 stringutil.forcebytestr(inst.filename),
272 )
273 )
274 else:
275 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
276 else: # suspicious IOError
277 raise
278 except MemoryError:
279 ui.error(_(b"abort: out of memory\n"))
280 except SystemExit as inst:
281 # Commands shouldn't sys.exit directly, but give a return code.
282 # Just in case catch this and and pass exit code to caller.
283 detailed_exit_code = 254
284 coarse_exit_code = inst.code
285
286 if ui.configbool(b'ui', b'detailed-exit-code'):
287 return detailed_exit_code
288 else:
289 return coarse_exit_code
290
291
292 def checknewlabel(repo, lbl, kind):
293 # Do not use the "kind" parameter in ui output.
294 # It makes strings difficult to translate.
295 if lbl in [b'tip', b'.', b'null']:
296 raise error.Abort(_(b"the name '%s' is reserved") % lbl)
297 for c in (b':', b'\0', b'\n', b'\r'):
298 if c in lbl:
299 raise error.Abort(
300 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
301 )
302 try:
303 int(lbl)
304 raise error.Abort(_(b"cannot use an integer as a name"))
305 except ValueError:
306 pass
307 if lbl.strip() != lbl:
308 raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
309
310
311 def checkfilename(f):
312 '''Check that the filename f is an acceptable filename for a tracked file'''
313 if b'\r' in f or b'\n' in f:
314 raise error.Abort(
314 raise error.InputError(
315 _(b"'\\n' and '\\r' disallowed in filenames: %r")
316 % pycompat.bytestr(f)
317 )
318
319
320 def checkportable(ui, f):
321 '''Check if filename f is portable and warn or abort depending on config'''
322 checkfilename(f)
323 abort, warn = checkportabilityalert(ui)
324 if abort or warn:
325 msg = util.checkwinfilename(f)
326 if msg:
327 msg = b"%s: %s" % (msg, procutil.shellquote(f))
328 if abort:
329 raise error.Abort(msg)
329 raise error.InputError(msg)
330 ui.warn(_(b"warning: %s\n") % msg)
330 ui.warn(_(b"warning: %s\n") % msg)
331
331
332
332
333 def checkportabilityalert(ui):
333 def checkportabilityalert(ui):
334 '''check if the user's config requests nothing, a warning, or abort for
334 '''check if the user's config requests nothing, a warning, or abort for
335 non-portable filenames'''
335 non-portable filenames'''
336 val = ui.config(b'ui', b'portablefilenames')
336 val = ui.config(b'ui', b'portablefilenames')
337 lval = val.lower()
337 lval = val.lower()
338 bval = stringutil.parsebool(val)
338 bval = stringutil.parsebool(val)
339 abort = pycompat.iswindows or lval == b'abort'
339 abort = pycompat.iswindows or lval == b'abort'
340 warn = bval or lval == b'warn'
340 warn = bval or lval == b'warn'
341 if bval is None and not (warn or abort or lval == b'ignore'):
341 if bval is None and not (warn or abort or lval == b'ignore'):
342 raise error.ConfigError(
342 raise error.ConfigError(
343 _(b"ui.portablefilenames value is invalid ('%s')") % val
343 _(b"ui.portablefilenames value is invalid ('%s')") % val
344 )
344 )
345 return abort, warn
345 return abort, warn
346
346
347
347
348 class casecollisionauditor(object):
348 class casecollisionauditor(object):
349 def __init__(self, ui, abort, dirstate):
349 def __init__(self, ui, abort, dirstate):
350 self._ui = ui
350 self._ui = ui
351 self._abort = abort
351 self._abort = abort
352 allfiles = b'\0'.join(dirstate)
352 allfiles = b'\0'.join(dirstate)
353 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
353 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
354 self._dirstate = dirstate
354 self._dirstate = dirstate
355 # The purpose of _newfiles is so that we don't complain about
355 # The purpose of _newfiles is so that we don't complain about
356 # case collisions if someone were to call this object with the
356 # case collisions if someone were to call this object with the
357 # same filename twice.
357 # same filename twice.
358 self._newfiles = set()
358 self._newfiles = set()
359
359
360 def __call__(self, f):
360 def __call__(self, f):
361 if f in self._newfiles:
361 if f in self._newfiles:
362 return
362 return
363 fl = encoding.lower(f)
363 fl = encoding.lower(f)
364 if fl in self._loweredfiles and f not in self._dirstate:
364 if fl in self._loweredfiles and f not in self._dirstate:
365 msg = _(b'possible case-folding collision for %s') % f
365 msg = _(b'possible case-folding collision for %s') % f
366 if self._abort:
366 if self._abort:
367 raise error.Abort(msg)
367 raise error.Abort(msg)
368 self._ui.warn(_(b"warning: %s\n") % msg)
368 self._ui.warn(_(b"warning: %s\n") % msg)
369 self._loweredfiles.add(fl)
369 self._loweredfiles.add(fl)
370 self._newfiles.add(f)
370 self._newfiles.add(f)
371
371
372
372
373 def filteredhash(repo, maxrev):
373 def filteredhash(repo, maxrev):
374 """build hash of filtered revisions in the current repoview.
374 """build hash of filtered revisions in the current repoview.
375
375
376 Multiple caches perform up-to-date validation by checking that the
376 Multiple caches perform up-to-date validation by checking that the
377 tiprev and tipnode stored in the cache file match the current repository.
377 tiprev and tipnode stored in the cache file match the current repository.
378 However, this is not sufficient for validating repoviews because the set
378 However, this is not sufficient for validating repoviews because the set
379 of revisions in the view may change without the repository tiprev and
379 of revisions in the view may change without the repository tiprev and
380 tipnode changing.
380 tipnode changing.
381
381
382 This function hashes all the revs filtered from the view and returns
382 This function hashes all the revs filtered from the view and returns
383 that SHA-1 digest.
383 that SHA-1 digest.
384 """
384 """
385 cl = repo.changelog
385 cl = repo.changelog
386 if not cl.filteredrevs:
386 if not cl.filteredrevs:
387 return None
387 return None
388 key = cl._filteredrevs_hashcache.get(maxrev)
388 key = cl._filteredrevs_hashcache.get(maxrev)
389 if not key:
389 if not key:
390 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
390 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
391 if revs:
391 if revs:
392 s = hashutil.sha1()
392 s = hashutil.sha1()
393 for rev in revs:
393 for rev in revs:
394 s.update(b'%d;' % rev)
394 s.update(b'%d;' % rev)
395 key = s.digest()
395 key = s.digest()
396 cl._filteredrevs_hashcache[maxrev] = key
396 cl._filteredrevs_hashcache[maxrev] = key
397 return key
397 return key
398
398
399
399
400 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
400 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
401 '''yield every hg repository under path, always recursively.
401 '''yield every hg repository under path, always recursively.
402 The recurse flag will only control recursion into repo working dirs'''
402 The recurse flag will only control recursion into repo working dirs'''
403
403
404 def errhandler(err):
404 def errhandler(err):
405 if err.filename == path:
405 if err.filename == path:
406 raise err
406 raise err
407
407
408 samestat = getattr(os.path, 'samestat', None)
408 samestat = getattr(os.path, 'samestat', None)
409 if followsym and samestat is not None:
409 if followsym and samestat is not None:
410
410
411 def adddir(dirlst, dirname):
411 def adddir(dirlst, dirname):
412 dirstat = os.stat(dirname)
412 dirstat = os.stat(dirname)
413 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
413 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
414 if not match:
414 if not match:
415 dirlst.append(dirstat)
415 dirlst.append(dirstat)
416 return not match
416 return not match
417
417
418 else:
418 else:
419 followsym = False
419 followsym = False
420
420
421 if (seen_dirs is None) and followsym:
421 if (seen_dirs is None) and followsym:
422 seen_dirs = []
422 seen_dirs = []
423 adddir(seen_dirs, path)
423 adddir(seen_dirs, path)
424 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
424 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
425 dirs.sort()
425 dirs.sort()
426 if b'.hg' in dirs:
426 if b'.hg' in dirs:
427 yield root # found a repository
427 yield root # found a repository
428 qroot = os.path.join(root, b'.hg', b'patches')
428 qroot = os.path.join(root, b'.hg', b'patches')
429 if os.path.isdir(os.path.join(qroot, b'.hg')):
429 if os.path.isdir(os.path.join(qroot, b'.hg')):
430 yield qroot # we have a patch queue repo here
430 yield qroot # we have a patch queue repo here
431 if recurse:
431 if recurse:
432 # avoid recursing inside the .hg directory
432 # avoid recursing inside the .hg directory
433 dirs.remove(b'.hg')
433 dirs.remove(b'.hg')
434 else:
434 else:
435 dirs[:] = [] # don't descend further
435 dirs[:] = [] # don't descend further
436 elif followsym:
436 elif followsym:
437 newdirs = []
437 newdirs = []
438 for d in dirs:
438 for d in dirs:
439 fname = os.path.join(root, d)
439 fname = os.path.join(root, d)
440 if adddir(seen_dirs, fname):
440 if adddir(seen_dirs, fname):
441 if os.path.islink(fname):
441 if os.path.islink(fname):
442 for hgname in walkrepos(fname, True, seen_dirs):
442 for hgname in walkrepos(fname, True, seen_dirs):
443 yield hgname
443 yield hgname
444 else:
444 else:
445 newdirs.append(d)
445 newdirs.append(d)
446 dirs[:] = newdirs
446 dirs[:] = newdirs
447
447
448
448
449 def binnode(ctx):
449 def binnode(ctx):
450 """Return binary node id for a given basectx"""
450 """Return binary node id for a given basectx"""
451 node = ctx.node()
451 node = ctx.node()
452 if node is None:
452 if node is None:
453 return wdirid
453 return wdirid
454 return node
454 return node
455
455
456
456
457 def intrev(ctx):
457 def intrev(ctx):
458 """Return integer for a given basectx that can be used in comparison or
458 """Return integer for a given basectx that can be used in comparison or
459 arithmetic operation"""
459 arithmetic operation"""
460 rev = ctx.rev()
460 rev = ctx.rev()
461 if rev is None:
461 if rev is None:
462 return wdirrev
462 return wdirrev
463 return rev
463 return rev
464
464
465
465
466 def formatchangeid(ctx):
466 def formatchangeid(ctx):
467 """Format changectx as '{rev}:{node|formatnode}', which is the default
467 """Format changectx as '{rev}:{node|formatnode}', which is the default
468 template provided by logcmdutil.changesettemplater"""
468 template provided by logcmdutil.changesettemplater"""
469 repo = ctx.repo()
469 repo = ctx.repo()
470 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
470 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
471
471
472
472
473 def formatrevnode(ui, rev, node):
473 def formatrevnode(ui, rev, node):
474 """Format given revision and node depending on the current verbosity"""
474 """Format given revision and node depending on the current verbosity"""
475 if ui.debugflag:
475 if ui.debugflag:
476 hexfunc = hex
476 hexfunc = hex
477 else:
477 else:
478 hexfunc = short
478 hexfunc = short
479 return b'%d:%s' % (rev, hexfunc(node))
479 return b'%d:%s' % (rev, hexfunc(node))
480
480
481
481
482 def resolvehexnodeidprefix(repo, prefix):
482 def resolvehexnodeidprefix(repo, prefix):
483 if prefix.startswith(b'x'):
483 if prefix.startswith(b'x'):
484 prefix = prefix[1:]
484 prefix = prefix[1:]
485 try:
485 try:
486 # Uses unfiltered repo because it's faster when prefix is ambiguous/
486 # Uses unfiltered repo because it's faster when prefix is ambiguous/
487 # This matches the shortesthexnodeidprefix() function below.
487 # This matches the shortesthexnodeidprefix() function below.
488 node = repo.unfiltered().changelog._partialmatch(prefix)
488 node = repo.unfiltered().changelog._partialmatch(prefix)
489 except error.AmbiguousPrefixLookupError:
489 except error.AmbiguousPrefixLookupError:
490 revset = repo.ui.config(
490 revset = repo.ui.config(
491 b'experimental', b'revisions.disambiguatewithin'
491 b'experimental', b'revisions.disambiguatewithin'
492 )
492 )
493 if revset:
493 if revset:
494 # Clear config to avoid infinite recursion
494 # Clear config to avoid infinite recursion
495 configoverrides = {
495 configoverrides = {
496 (b'experimental', b'revisions.disambiguatewithin'): None
496 (b'experimental', b'revisions.disambiguatewithin'): None
497 }
497 }
498 with repo.ui.configoverride(configoverrides):
498 with repo.ui.configoverride(configoverrides):
499 revs = repo.anyrevs([revset], user=True)
499 revs = repo.anyrevs([revset], user=True)
500 matches = []
500 matches = []
501 for rev in revs:
501 for rev in revs:
502 node = repo.changelog.node(rev)
502 node = repo.changelog.node(rev)
503 if hex(node).startswith(prefix):
503 if hex(node).startswith(prefix):
504 matches.append(node)
504 matches.append(node)
505 if len(matches) == 1:
505 if len(matches) == 1:
506 return matches[0]
506 return matches[0]
507 raise
507 raise
508 if node is None:
508 if node is None:
509 return
509 return
510 repo.changelog.rev(node) # make sure node isn't filtered
510 repo.changelog.rev(node) # make sure node isn't filtered
511 return node
511 return node
512
512
513
513
514 def mayberevnum(repo, prefix):
514 def mayberevnum(repo, prefix):
515 """Checks if the given prefix may be mistaken for a revision number"""
515 """Checks if the given prefix may be mistaken for a revision number"""
516 try:
516 try:
517 i = int(prefix)
517 i = int(prefix)
518 # if we are a pure int, then starting with zero will not be
518 # if we are a pure int, then starting with zero will not be
519 # confused as a rev; or, obviously, if the int is larger
519 # confused as a rev; or, obviously, if the int is larger
520 # than the value of the tip rev. We still need to disambiguate if
520 # than the value of the tip rev. We still need to disambiguate if
521 # prefix == '0', since that *is* a valid revnum.
521 # prefix == '0', since that *is* a valid revnum.
522 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
522 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
523 return False
523 return False
524 return True
524 return True
525 except ValueError:
525 except ValueError:
526 return False
526 return False
527
527
528
528
529 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
529 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
530 """Find the shortest unambiguous prefix that matches hexnode.
530 """Find the shortest unambiguous prefix that matches hexnode.
531
531
532 If "cache" is not None, it must be a dictionary that can be used for
532 If "cache" is not None, it must be a dictionary that can be used for
533 caching between calls to this method.
533 caching between calls to this method.
534 """
534 """
535 # _partialmatch() of filtered changelog could take O(len(repo)) time,
535 # _partialmatch() of filtered changelog could take O(len(repo)) time,
536 # which would be unacceptably slow. so we look for hash collision in
536 # which would be unacceptably slow. so we look for hash collision in
537 # unfiltered space, which means some hashes may be slightly longer.
537 # unfiltered space, which means some hashes may be slightly longer.
538
538
539 minlength = max(minlength, 1)
539 minlength = max(minlength, 1)
540
540
541 def disambiguate(prefix):
541 def disambiguate(prefix):
542 """Disambiguate against revnums."""
542 """Disambiguate against revnums."""
543 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
543 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
544 if mayberevnum(repo, prefix):
544 if mayberevnum(repo, prefix):
545 return b'x' + prefix
545 return b'x' + prefix
546 else:
546 else:
547 return prefix
547 return prefix
548
548
549 hexnode = hex(node)
549 hexnode = hex(node)
550 for length in range(len(prefix), len(hexnode) + 1):
550 for length in range(len(prefix), len(hexnode) + 1):
551 prefix = hexnode[:length]
551 prefix = hexnode[:length]
552 if not mayberevnum(repo, prefix):
552 if not mayberevnum(repo, prefix):
553 return prefix
553 return prefix
554
554
555 cl = repo.unfiltered().changelog
555 cl = repo.unfiltered().changelog
556 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
556 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
557 if revset:
557 if revset:
558 revs = None
558 revs = None
559 if cache is not None:
559 if cache is not None:
560 revs = cache.get(b'disambiguationrevset')
560 revs = cache.get(b'disambiguationrevset')
561 if revs is None:
561 if revs is None:
562 revs = repo.anyrevs([revset], user=True)
562 revs = repo.anyrevs([revset], user=True)
563 if cache is not None:
563 if cache is not None:
564 cache[b'disambiguationrevset'] = revs
564 cache[b'disambiguationrevset'] = revs
565 if cl.rev(node) in revs:
565 if cl.rev(node) in revs:
566 hexnode = hex(node)
566 hexnode = hex(node)
567 nodetree = None
567 nodetree = None
568 if cache is not None:
568 if cache is not None:
569 nodetree = cache.get(b'disambiguationnodetree')
569 nodetree = cache.get(b'disambiguationnodetree')
570 if not nodetree:
570 if not nodetree:
571 if util.safehasattr(parsers, 'nodetree'):
571 if util.safehasattr(parsers, 'nodetree'):
572 # The CExt is the only implementation to provide a nodetree
572 # The CExt is the only implementation to provide a nodetree
573 # class so far.
573 # class so far.
574 index = cl.index
574 index = cl.index
575 if util.safehasattr(index, 'get_cindex'):
575 if util.safehasattr(index, 'get_cindex'):
576 # the rust wrapped need to give access to its internal index
576 # the rust wrapped need to give access to its internal index
577 index = index.get_cindex()
577 index = index.get_cindex()
578 nodetree = parsers.nodetree(index, len(revs))
578 nodetree = parsers.nodetree(index, len(revs))
579 for r in revs:
579 for r in revs:
580 nodetree.insert(r)
580 nodetree.insert(r)
581 if cache is not None:
581 if cache is not None:
582 cache[b'disambiguationnodetree'] = nodetree
582 cache[b'disambiguationnodetree'] = nodetree
583 if nodetree is not None:
583 if nodetree is not None:
584 length = max(nodetree.shortest(node), minlength)
584 length = max(nodetree.shortest(node), minlength)
585 prefix = hexnode[:length]
585 prefix = hexnode[:length]
586 return disambiguate(prefix)
586 return disambiguate(prefix)
587 for length in range(minlength, len(hexnode) + 1):
587 for length in range(minlength, len(hexnode) + 1):
588 matches = []
588 matches = []
589 prefix = hexnode[:length]
589 prefix = hexnode[:length]
590 for rev in revs:
590 for rev in revs:
591 otherhexnode = repo[rev].hex()
591 otherhexnode = repo[rev].hex()
592 if prefix == otherhexnode[:length]:
592 if prefix == otherhexnode[:length]:
593 matches.append(otherhexnode)
593 matches.append(otherhexnode)
594 if len(matches) == 1:
594 if len(matches) == 1:
595 return disambiguate(prefix)
595 return disambiguate(prefix)
596
596
597 try:
597 try:
598 return disambiguate(cl.shortest(node, minlength))
598 return disambiguate(cl.shortest(node, minlength))
599 except error.LookupError:
599 except error.LookupError:
600 raise error.RepoLookupError()
600 raise error.RepoLookupError()
601
601
602
602
603 def isrevsymbol(repo, symbol):
603 def isrevsymbol(repo, symbol):
604 """Checks if a symbol exists in the repo.
604 """Checks if a symbol exists in the repo.
605
605
606 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
606 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
607 symbol is an ambiguous nodeid prefix.
607 symbol is an ambiguous nodeid prefix.
608 """
608 """
609 try:
609 try:
610 revsymbol(repo, symbol)
610 revsymbol(repo, symbol)
611 return True
611 return True
612 except error.RepoLookupError:
612 except error.RepoLookupError:
613 return False
613 return False
614
614
615
615
616 def revsymbol(repo, symbol):
616 def revsymbol(repo, symbol):
617 """Returns a context given a single revision symbol (as string).
617 """Returns a context given a single revision symbol (as string).
618
618
619 This is similar to revsingle(), but accepts only a single revision symbol,
619 This is similar to revsingle(), but accepts only a single revision symbol,
620 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
620 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
621 not "max(public())".
621 not "max(public())".
622 """
622 """
623 if not isinstance(symbol, bytes):
623 if not isinstance(symbol, bytes):
624 msg = (
624 msg = (
625 b"symbol (%s of type %s) was not a string, did you mean "
625 b"symbol (%s of type %s) was not a string, did you mean "
626 b"repo[symbol]?" % (symbol, type(symbol))
626 b"repo[symbol]?" % (symbol, type(symbol))
627 )
627 )
628 raise error.ProgrammingError(msg)
628 raise error.ProgrammingError(msg)
629 try:
629 try:
630 if symbol in (b'.', b'tip', b'null'):
630 if symbol in (b'.', b'tip', b'null'):
631 return repo[symbol]
631 return repo[symbol]
632
632
633 try:
633 try:
634 r = int(symbol)
634 r = int(symbol)
635 if b'%d' % r != symbol:
635 if b'%d' % r != symbol:
636 raise ValueError
636 raise ValueError
637 l = len(repo.changelog)
637 l = len(repo.changelog)
638 if r < 0:
638 if r < 0:
639 r += l
639 r += l
640 if r < 0 or r >= l and r != wdirrev:
640 if r < 0 or r >= l and r != wdirrev:
641 raise ValueError
641 raise ValueError
642 return repo[r]
642 return repo[r]
643 except error.FilteredIndexError:
643 except error.FilteredIndexError:
644 raise
644 raise
645 except (ValueError, OverflowError, IndexError):
645 except (ValueError, OverflowError, IndexError):
646 pass
646 pass
647
647
648 if len(symbol) == 40:
648 if len(symbol) == 40:
649 try:
649 try:
650 node = bin(symbol)
650 node = bin(symbol)
651 rev = repo.changelog.rev(node)
651 rev = repo.changelog.rev(node)
652 return repo[rev]
652 return repo[rev]
653 except error.FilteredLookupError:
653 except error.FilteredLookupError:
654 raise
654 raise
655 except (TypeError, LookupError):
655 except (TypeError, LookupError):
656 pass
656 pass
657
657
658 # look up bookmarks through the name interface
658 # look up bookmarks through the name interface
659 try:
659 try:
660 node = repo.names.singlenode(repo, symbol)
660 node = repo.names.singlenode(repo, symbol)
661 rev = repo.changelog.rev(node)
661 rev = repo.changelog.rev(node)
662 return repo[rev]
662 return repo[rev]
663 except KeyError:
663 except KeyError:
664 pass
664 pass
665
665
666 node = resolvehexnodeidprefix(repo, symbol)
666 node = resolvehexnodeidprefix(repo, symbol)
667 if node is not None:
667 if node is not None:
668 rev = repo.changelog.rev(node)
668 rev = repo.changelog.rev(node)
669 return repo[rev]
669 return repo[rev]
670
670
671 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
671 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
672
672
673 except error.WdirUnsupported:
673 except error.WdirUnsupported:
674 return repo[None]
674 return repo[None]
675 except (
675 except (
676 error.FilteredIndexError,
676 error.FilteredIndexError,
677 error.FilteredLookupError,
677 error.FilteredLookupError,
678 error.FilteredRepoLookupError,
678 error.FilteredRepoLookupError,
679 ):
679 ):
680 raise _filterederror(repo, symbol)
680 raise _filterederror(repo, symbol)
681
681
682
682
683 def _filterederror(repo, changeid):
683 def _filterederror(repo, changeid):
684 """build an exception to be raised about a filtered changeid
684 """build an exception to be raised about a filtered changeid
685
685
686 This is extracted in a function to help extensions (eg: evolve) to
686 This is extracted in a function to help extensions (eg: evolve) to
687 experiment with various message variants."""
687 experiment with various message variants."""
688 if repo.filtername.startswith(b'visible'):
688 if repo.filtername.startswith(b'visible'):
689
689
690 # Check if the changeset is obsolete
690 # Check if the changeset is obsolete
691 unfilteredrepo = repo.unfiltered()
691 unfilteredrepo = repo.unfiltered()
692 ctx = revsymbol(unfilteredrepo, changeid)
692 ctx = revsymbol(unfilteredrepo, changeid)
693
693
694 # If the changeset is obsolete, enrich the message with the reason
694 # If the changeset is obsolete, enrich the message with the reason
695 # that made this changeset not visible
695 # that made this changeset not visible
696 if ctx.obsolete():
696 if ctx.obsolete():
697 msg = obsutil._getfilteredreason(repo, changeid, ctx)
697 msg = obsutil._getfilteredreason(repo, changeid, ctx)
698 else:
698 else:
699 msg = _(b"hidden revision '%s'") % changeid
699 msg = _(b"hidden revision '%s'") % changeid
700
700
701 hint = _(b'use --hidden to access hidden revisions')
701 hint = _(b'use --hidden to access hidden revisions')
702
702
703 return error.FilteredRepoLookupError(msg, hint=hint)
703 return error.FilteredRepoLookupError(msg, hint=hint)
704 msg = _(b"filtered revision '%s' (not in '%s' subset)")
704 msg = _(b"filtered revision '%s' (not in '%s' subset)")
705 msg %= (changeid, repo.filtername)
705 msg %= (changeid, repo.filtername)
706 return error.FilteredRepoLookupError(msg)
706 return error.FilteredRepoLookupError(msg)
707
707
708
708
709 def revsingle(repo, revspec, default=b'.', localalias=None):
709 def revsingle(repo, revspec, default=b'.', localalias=None):
710 if not revspec and revspec != 0:
710 if not revspec and revspec != 0:
711 return repo[default]
711 return repo[default]
712
712
713 l = revrange(repo, [revspec], localalias=localalias)
713 l = revrange(repo, [revspec], localalias=localalias)
714 if not l:
714 if not l:
715 raise error.Abort(_(b'empty revision set'))
715 raise error.Abort(_(b'empty revision set'))
716 return repo[l.last()]
716 return repo[l.last()]
717
717
718
718
719 def _pairspec(revspec):
719 def _pairspec(revspec):
720 tree = revsetlang.parse(revspec)
720 tree = revsetlang.parse(revspec)
721 return tree and tree[0] in (
721 return tree and tree[0] in (
722 b'range',
722 b'range',
723 b'rangepre',
723 b'rangepre',
724 b'rangepost',
724 b'rangepost',
725 b'rangeall',
725 b'rangeall',
726 )
726 )
727
727
728
728
729 def revpair(repo, revs):
729 def revpair(repo, revs):
730 if not revs:
730 if not revs:
731 return repo[b'.'], repo[None]
731 return repo[b'.'], repo[None]
732
732
733 l = revrange(repo, revs)
733 l = revrange(repo, revs)
734
734
735 if not l:
735 if not l:
736 raise error.Abort(_(b'empty revision range'))
736 raise error.Abort(_(b'empty revision range'))
737
737
738 first = l.first()
738 first = l.first()
739 second = l.last()
739 second = l.last()
740
740
741 if (
741 if (
742 first == second
742 first == second
743 and len(revs) >= 2
743 and len(revs) >= 2
744 and not all(revrange(repo, [r]) for r in revs)
744 and not all(revrange(repo, [r]) for r in revs)
745 ):
745 ):
746 raise error.Abort(_(b'empty revision on one side of range'))
746 raise error.Abort(_(b'empty revision on one side of range'))
747
747
748 # if top-level is range expression, the result must always be a pair
748 # if top-level is range expression, the result must always be a pair
749 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
749 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
750 return repo[first], repo[None]
750 return repo[first], repo[None]
751
751
752 return repo[first], repo[second]
752 return repo[first], repo[second]
753
753
754
754
755 def revrange(repo, specs, localalias=None):
755 def revrange(repo, specs, localalias=None):
756 """Execute 1 to many revsets and return the union.
756 """Execute 1 to many revsets and return the union.
757
757
758 This is the preferred mechanism for executing revsets using user-specified
758 This is the preferred mechanism for executing revsets using user-specified
759 config options, such as revset aliases.
759 config options, such as revset aliases.
760
760
761 The revsets specified by ``specs`` will be executed via a chained ``OR``
761 The revsets specified by ``specs`` will be executed via a chained ``OR``
762 expression. If ``specs`` is empty, an empty result is returned.
762 expression. If ``specs`` is empty, an empty result is returned.
763
763
764 ``specs`` can contain integers, in which case they are assumed to be
764 ``specs`` can contain integers, in which case they are assumed to be
765 revision numbers.
765 revision numbers.
766
766
767 It is assumed the revsets are already formatted. If you have arguments
767 It is assumed the revsets are already formatted. If you have arguments
768 that need to be expanded in the revset, call ``revsetlang.formatspec()``
768 that need to be expanded in the revset, call ``revsetlang.formatspec()``
769 and pass the result as an element of ``specs``.
769 and pass the result as an element of ``specs``.
770
770
771 Specifying a single revset is allowed.
771 Specifying a single revset is allowed.
772
772
773 Returns a ``smartset.abstractsmartset`` which is a list-like interface over
773 Returns a ``smartset.abstractsmartset`` which is a list-like interface over
774 integer revisions.
774 integer revisions.
775 """
775 """
776 allspecs = []
776 allspecs = []
777 for spec in specs:
777 for spec in specs:
778 if isinstance(spec, int):
778 if isinstance(spec, int):
779 spec = revsetlang.formatspec(b'%d', spec)
779 spec = revsetlang.formatspec(b'%d', spec)
780 allspecs.append(spec)
780 allspecs.append(spec)
781 return repo.anyrevs(allspecs, user=True, localalias=localalias)
781 return repo.anyrevs(allspecs, user=True, localalias=localalias)
782
782
783
783
784 def increasingwindows(windowsize=8, sizelimit=512):
784 def increasingwindows(windowsize=8, sizelimit=512):
785 while True:
785 while True:
786 yield windowsize
786 yield windowsize
787 if windowsize < sizelimit:
787 if windowsize < sizelimit:
788 windowsize *= 2
788 windowsize *= 2
789
789
790
790
791 def walkchangerevs(repo, revs, makefilematcher, prepare):
791 def walkchangerevs(repo, revs, makefilematcher, prepare):
792 '''Iterate over files and the revs in a "windowed" way.
792 '''Iterate over files and the revs in a "windowed" way.
793
793
794 Callers most commonly need to iterate backwards over the history
794 Callers most commonly need to iterate backwards over the history
795 in which they are interested. Doing so has awful (quadratic-looking)
795 in which they are interested. Doing so has awful (quadratic-looking)
796 performance, so we use iterators in a "windowed" way.
796 performance, so we use iterators in a "windowed" way.
797
797
798 We walk a window of revisions in the desired order. Within the
798 We walk a window of revisions in the desired order. Within the
799 window, we first walk forwards to gather data, then in the desired
799 window, we first walk forwards to gather data, then in the desired
800 order (usually backwards) to display it.
800 order (usually backwards) to display it.
801
801
802 This function returns an iterator yielding contexts. Before
802 This function returns an iterator yielding contexts. Before
803 yielding each context, the iterator will first call the prepare
803 yielding each context, the iterator will first call the prepare
804 function on each context in the window in forward order.'''
804 function on each context in the window in forward order.'''
805
805
806 if not revs:
806 if not revs:
807 return []
807 return []
808 change = repo.__getitem__
808 change = repo.__getitem__
809
809
810 def iterate():
810 def iterate():
811 it = iter(revs)
811 it = iter(revs)
812 stopiteration = False
812 stopiteration = False
813 for windowsize in increasingwindows():
813 for windowsize in increasingwindows():
814 nrevs = []
814 nrevs = []
815 for i in pycompat.xrange(windowsize):
815 for i in pycompat.xrange(windowsize):
816 rev = next(it, None)
816 rev = next(it, None)
817 if rev is None:
817 if rev is None:
818 stopiteration = True
818 stopiteration = True
819 break
819 break
820 nrevs.append(rev)
820 nrevs.append(rev)
821 for rev in sorted(nrevs):
821 for rev in sorted(nrevs):
822 ctx = change(rev)
822 ctx = change(rev)
823 prepare(ctx, makefilematcher(ctx))
823 prepare(ctx, makefilematcher(ctx))
824 for rev in nrevs:
824 for rev in nrevs:
825 yield change(rev)
825 yield change(rev)
826
826
827 if stopiteration:
827 if stopiteration:
828 break
828 break
829
829
830 return iterate()
830 return iterate()
831
831
832
832
833 def meaningfulparents(repo, ctx):
833 def meaningfulparents(repo, ctx):
834 """Return list of meaningful (or all if debug) parentrevs for rev.
834 """Return list of meaningful (or all if debug) parentrevs for rev.
835
835
836 For merges (two non-nullrev revisions) both parents are meaningful.
836 For merges (two non-nullrev revisions) both parents are meaningful.
837 Otherwise the first parent revision is considered meaningful if it
837 Otherwise the first parent revision is considered meaningful if it
838 is not the preceding revision.
838 is not the preceding revision.
839 """
839 """
840 parents = ctx.parents()
840 parents = ctx.parents()
841 if len(parents) > 1:
841 if len(parents) > 1:
842 return parents
842 return parents
843 if repo.ui.debugflag:
843 if repo.ui.debugflag:
844 return [parents[0], repo[nullrev]]
844 return [parents[0], repo[nullrev]]
845 if parents[0].rev() >= intrev(ctx) - 1:
845 if parents[0].rev() >= intrev(ctx) - 1:
846 return []
846 return []
847 return parents
847 return parents
848
848
849
849
850 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
850 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
851 """Return a function that produced paths for presenting to the user.
851 """Return a function that produced paths for presenting to the user.
852
852
853 The returned function takes a repo-relative path and produces a path
853 The returned function takes a repo-relative path and produces a path
854 that can be presented in the UI.
854 that can be presented in the UI.
855
855
856 Depending on the value of ui.relative-paths, either a repo-relative or
856 Depending on the value of ui.relative-paths, either a repo-relative or
857 cwd-relative path will be produced.
857 cwd-relative path will be produced.
858
858
859 legacyrelativevalue is the value to use if ui.relative-paths=legacy
859 legacyrelativevalue is the value to use if ui.relative-paths=legacy
860
860
861 If forcerelativevalue is not None, then that value will be used regardless
861 If forcerelativevalue is not None, then that value will be used regardless
862 of what ui.relative-paths is set to.
862 of what ui.relative-paths is set to.
863 """
863 """
864 if forcerelativevalue is not None:
864 if forcerelativevalue is not None:
865 relative = forcerelativevalue
865 relative = forcerelativevalue
866 else:
866 else:
867 config = repo.ui.config(b'ui', b'relative-paths')
867 config = repo.ui.config(b'ui', b'relative-paths')
868 if config == b'legacy':
868 if config == b'legacy':
869 relative = legacyrelativevalue
869 relative = legacyrelativevalue
870 else:
870 else:
871 relative = stringutil.parsebool(config)
871 relative = stringutil.parsebool(config)
872 if relative is None:
872 if relative is None:
873 raise error.ConfigError(
873 raise error.ConfigError(
874 _(b"ui.relative-paths is not a boolean ('%s')") % config
874 _(b"ui.relative-paths is not a boolean ('%s')") % config
875 )
875 )
876
876
877 if relative:
877 if relative:
878 cwd = repo.getcwd()
878 cwd = repo.getcwd()
879 if cwd != b'':
879 if cwd != b'':
880 # this branch would work even if cwd == b'' (ie cwd = repo
880 # this branch would work even if cwd == b'' (ie cwd = repo
881 # root), but its generality makes the returned function slower
881 # root), but its generality makes the returned function slower
882 pathto = repo.pathto
882 pathto = repo.pathto
883 return lambda f: pathto(f, cwd)
883 return lambda f: pathto(f, cwd)
884 if repo.ui.configbool(b'ui', b'slash'):
884 if repo.ui.configbool(b'ui', b'slash'):
885 return lambda f: f
885 return lambda f: f
886 else:
886 else:
887 return util.localpath
887 return util.localpath
888
888
889
889
890 def subdiruipathfn(subpath, uipathfn):
890 def subdiruipathfn(subpath, uipathfn):
891 '''Create a new uipathfn that treats the file as relative to subpath.'''
891 '''Create a new uipathfn that treats the file as relative to subpath.'''
892 return lambda f: uipathfn(posixpath.join(subpath, f))
892 return lambda f: uipathfn(posixpath.join(subpath, f))
893
893
894
894
895 def anypats(pats, opts):
895 def anypats(pats, opts):
896 '''Checks if any patterns, including --include and --exclude were given.
896 '''Checks if any patterns, including --include and --exclude were given.
897
897
898 Some commands (e.g. addremove) use this condition for deciding whether to
898 Some commands (e.g. addremove) use this condition for deciding whether to
899 print absolute or relative paths.
899 print absolute or relative paths.
900 '''
900 '''
901 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
901 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
902
902
903
903
904 def expandpats(pats):
904 def expandpats(pats):
905 '''Expand bare globs when running on windows.
905 '''Expand bare globs when running on windows.
906 On posix we assume it already has already been done by sh.'''
906 On posix we assume it already has already been done by sh.'''
907 if not util.expandglobs:
907 if not util.expandglobs:
908 return list(pats)
908 return list(pats)
909 ret = []
909 ret = []
910 for kindpat in pats:
910 for kindpat in pats:
911 kind, pat = matchmod._patsplit(kindpat, None)
911 kind, pat = matchmod._patsplit(kindpat, None)
912 if kind is None:
912 if kind is None:
913 try:
913 try:
914 globbed = glob.glob(pat)
914 globbed = glob.glob(pat)
915 except re.error:
915 except re.error:
916 globbed = [pat]
916 globbed = [pat]
917 if globbed:
917 if globbed:
918 ret.extend(globbed)
918 ret.extend(globbed)
919 continue
919 continue
920 ret.append(kindpat)
920 ret.append(kindpat)
921 return ret
921 return ret
922
922
923
923
924 def matchandpats(
924 def matchandpats(
925 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
925 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
926 ):
926 ):
927 '''Return a matcher and the patterns that were used.
927 '''Return a matcher and the patterns that were used.
928 The matcher will warn about bad matches, unless an alternate badfn callback
928 The matcher will warn about bad matches, unless an alternate badfn callback
929 is provided.'''
929 is provided.'''
930 if opts is None:
930 if opts is None:
931 opts = {}
931 opts = {}
932 if not globbed and default == b'relpath':
932 if not globbed and default == b'relpath':
933 pats = expandpats(pats or [])
933 pats = expandpats(pats or [])
934
934
935 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
935 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
936
936
937 def bad(f, msg):
937 def bad(f, msg):
938 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
938 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
939
939
940 if badfn is None:
940 if badfn is None:
941 badfn = bad
941 badfn = bad
942
942
943 m = ctx.match(
943 m = ctx.match(
944 pats,
944 pats,
945 opts.get(b'include'),
945 opts.get(b'include'),
946 opts.get(b'exclude'),
946 opts.get(b'exclude'),
947 default,
947 default,
948 listsubrepos=opts.get(b'subrepos'),
948 listsubrepos=opts.get(b'subrepos'),
949 badfn=badfn,
949 badfn=badfn,
950 )
950 )
951
951
952 if m.always():
952 if m.always():
953 pats = []
953 pats = []
954 return m, pats
954 return m, pats
955
955
956
956
957 def match(
957 def match(
958 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
958 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
959 ):
959 ):
960 '''Return a matcher that will warn about bad matches.'''
960 '''Return a matcher that will warn about bad matches.'''
961 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
961 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
962
962
963
963
964 def matchall(repo):
964 def matchall(repo):
965 '''Return a matcher that will efficiently match everything.'''
965 '''Return a matcher that will efficiently match everything.'''
966 return matchmod.always()
966 return matchmod.always()
967
967
968
968
969 def matchfiles(repo, files, badfn=None):
969 def matchfiles(repo, files, badfn=None):
970 '''Return a matcher that will efficiently match exactly these files.'''
970 '''Return a matcher that will efficiently match exactly these files.'''
971 return matchmod.exact(files, badfn=badfn)
971 return matchmod.exact(files, badfn=badfn)
972
972
973
973
974 def parsefollowlinespattern(repo, rev, pat, msg):
974 def parsefollowlinespattern(repo, rev, pat, msg):
975 """Return a file name from `pat` pattern suitable for usage in followlines
975 """Return a file name from `pat` pattern suitable for usage in followlines
976 logic.
976 logic.
977 """
977 """
978 if not matchmod.patkind(pat):
978 if not matchmod.patkind(pat):
979 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
979 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
980 else:
980 else:
981 ctx = repo[rev]
981 ctx = repo[rev]
982 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
982 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
983 files = [f for f in ctx if m(f)]
983 files = [f for f in ctx if m(f)]
984 if len(files) != 1:
984 if len(files) != 1:
985 raise error.ParseError(msg)
985 raise error.ParseError(msg)
986 return files[0]
986 return files[0]
987
987
988
988
989 def getorigvfs(ui, repo):
989 def getorigvfs(ui, repo):
990 """return a vfs suitable to save 'orig' file
990 """return a vfs suitable to save 'orig' file
991
991
992 return None if no special directory is configured"""
992 return None if no special directory is configured"""
993 origbackuppath = ui.config(b'ui', b'origbackuppath')
993 origbackuppath = ui.config(b'ui', b'origbackuppath')
994 if not origbackuppath:
994 if not origbackuppath:
995 return None
995 return None
996 return vfs.vfs(repo.wvfs.join(origbackuppath))
996 return vfs.vfs(repo.wvfs.join(origbackuppath))
997
997
998
998
999 def backuppath(ui, repo, filepath):
999 def backuppath(ui, repo, filepath):
1000 '''customize where working copy backup files (.orig files) are created
1000 '''customize where working copy backup files (.orig files) are created
1001
1001
1002 Fetch user defined path from config file: [ui] origbackuppath = <path>
1002 Fetch user defined path from config file: [ui] origbackuppath = <path>
1003 Fall back to default (filepath with .orig suffix) if not specified
1003 Fall back to default (filepath with .orig suffix) if not specified
1004
1004
1005 filepath is repo-relative
1005 filepath is repo-relative
1006
1006
1007 Returns an absolute path
1007 Returns an absolute path
1008 '''
1008 '''
1009 origvfs = getorigvfs(ui, repo)
1009 origvfs = getorigvfs(ui, repo)
1010 if origvfs is None:
1010 if origvfs is None:
1011 return repo.wjoin(filepath + b".orig")
1011 return repo.wjoin(filepath + b".orig")
1012
1012
1013 origbackupdir = origvfs.dirname(filepath)
1013 origbackupdir = origvfs.dirname(filepath)
1014 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
1014 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
1015 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
1015 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
1016
1016
1017 # Remove any files that conflict with the backup file's path
1017 # Remove any files that conflict with the backup file's path
1018 for f in reversed(list(pathutil.finddirs(filepath))):
1018 for f in reversed(list(pathutil.finddirs(filepath))):
1019 if origvfs.isfileorlink(f):
1019 if origvfs.isfileorlink(f):
1020 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
1020 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
1021 origvfs.unlink(f)
1021 origvfs.unlink(f)
1022 break
1022 break
1023
1023
1024 origvfs.makedirs(origbackupdir)
1024 origvfs.makedirs(origbackupdir)
1025
1025
1026 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
1026 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
1027 ui.note(
1027 ui.note(
1028 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
1028 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
1029 )
1029 )
1030 origvfs.rmtree(filepath, forcibly=True)
1030 origvfs.rmtree(filepath, forcibly=True)
1031
1031
1032 return origvfs.join(filepath)
1032 return origvfs.join(filepath)
1033
1033
1034
1034
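# Illustrative sketch (not part of scmutil.py): the two placements computed
# by backuppath() above, reduced to plain posixpath arithmetic. The repo
# root, file name and origbackuppath value are hypothetical; the real code
# resolves paths through the repo vfs and handles conflicting files.
import posixpath  # already imported at the top of this module

def _backuppath_demo(repo_root, filepath, origbackuppath=None):
    if origbackuppath is None:
        # default: a sibling file with a .orig suffix inside the working copy
        return posixpath.join(repo_root, filepath + '.orig')
    # configured: mirror the repo-relative path under ui.origbackuppath
    return posixpath.join(repo_root, origbackuppath, filepath)

# _backuppath_demo('/repo', 'src/a.c')                    -> '/repo/src/a.c.orig'
# _backuppath_demo('/repo', 'src/a.c', '.hg/origbackups') -> '/repo/.hg/origbackups/src/a.c'
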
1035 class _containsnode(object):
1035 class _containsnode(object):
1036 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1036 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1037
1037
1038 def __init__(self, repo, revcontainer):
1038 def __init__(self, repo, revcontainer):
1039 self._torev = repo.changelog.rev
1039 self._torev = repo.changelog.rev
1040 self._revcontains = revcontainer.__contains__
1040 self._revcontains = revcontainer.__contains__
1041
1041
1042 def __contains__(self, node):
1042 def __contains__(self, node):
1043 return self._revcontains(self._torev(node))
1043 return self._revcontains(self._torev(node))
1044
1044
1045
1045
1046 def cleanupnodes(
1046 def cleanupnodes(
1047 repo,
1047 repo,
1048 replacements,
1048 replacements,
1049 operation,
1049 operation,
1050 moves=None,
1050 moves=None,
1051 metadata=None,
1051 metadata=None,
1052 fixphase=False,
1052 fixphase=False,
1053 targetphase=None,
1053 targetphase=None,
1054 backup=True,
1054 backup=True,
1055 ):
1055 ):
1056 """do common cleanups when old nodes are replaced by new nodes
1056 """do common cleanups when old nodes are replaced by new nodes
1057
1057
1058 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
1058 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
1059 (we might also want to move working directory parent in the future)
1059 (we might also want to move working directory parent in the future)
1060
1060
1061 By default, bookmark moves are calculated automatically from 'replacements',
1061 By default, bookmark moves are calculated automatically from 'replacements',
1062 but 'moves' can be used to override that. Also, 'moves' may include
1062 but 'moves' can be used to override that. Also, 'moves' may include
1063 additional bookmark moves that should not have associated obsmarkers.
1063 additional bookmark moves that should not have associated obsmarkers.
1064
1064
1065 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
1065 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
1066 have replacements. operation is a string, like "rebase".
1066 have replacements. operation is a string, like "rebase".
1067
1067
1068 metadata is a dictionary containing metadata to be stored in obsmarker if
1068 metadata is a dictionary containing metadata to be stored in obsmarker if
1069 obsolescence is enabled.
1069 obsolescence is enabled.
1070 """
1070 """
1071 assert fixphase or targetphase is None
1071 assert fixphase or targetphase is None
1072 if not replacements and not moves:
1072 if not replacements and not moves:
1073 return
1073 return
1074
1074
1075 # translate mapping's other forms
1075 # translate mapping's other forms
1076 if not util.safehasattr(replacements, b'items'):
1076 if not util.safehasattr(replacements, b'items'):
1077 replacements = {(n,): () for n in replacements}
1077 replacements = {(n,): () for n in replacements}
1078 else:
1078 else:
1079 # upgrading non-tuple "source" keys to tuple ones for BC
1079 # upgrading non-tuple "source" keys to tuple ones for BC
1080 repls = {}
1080 repls = {}
1081 for key, value in replacements.items():
1081 for key, value in replacements.items():
1082 if not isinstance(key, tuple):
1082 if not isinstance(key, tuple):
1083 key = (key,)
1083 key = (key,)
1084 repls[key] = value
1084 repls[key] = value
1085 replacements = repls
1085 replacements = repls
1086
1086
1087 # Unfiltered repo is needed since nodes in replacements might be hidden.
1087 # Unfiltered repo is needed since nodes in replacements might be hidden.
1088 unfi = repo.unfiltered()
1088 unfi = repo.unfiltered()
1089
1089
1090 # Calculate bookmark movements
1090 # Calculate bookmark movements
1091 if moves is None:
1091 if moves is None:
1092 moves = {}
1092 moves = {}
1093 for oldnodes, newnodes in replacements.items():
1093 for oldnodes, newnodes in replacements.items():
1094 for oldnode in oldnodes:
1094 for oldnode in oldnodes:
1095 if oldnode in moves:
1095 if oldnode in moves:
1096 continue
1096 continue
1097 if len(newnodes) > 1:
1097 if len(newnodes) > 1:
1098 # usually a split, take the one with the biggest rev number
1098 # usually a split, take the one with the biggest rev number
1099 newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
1099 newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
1100 elif len(newnodes) == 0:
1100 elif len(newnodes) == 0:
1101 # move bookmark backwards
1101 # move bookmark backwards
1102 allreplaced = []
1102 allreplaced = []
1103 for rep in replacements:
1103 for rep in replacements:
1104 allreplaced.extend(rep)
1104 allreplaced.extend(rep)
1105 roots = list(
1105 roots = list(
1106 unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
1106 unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
1107 )
1107 )
1108 if roots:
1108 if roots:
1109 newnode = roots[0].node()
1109 newnode = roots[0].node()
1110 else:
1110 else:
1111 newnode = nullid
1111 newnode = nullid
1112 else:
1112 else:
1113 newnode = newnodes[0]
1113 newnode = newnodes[0]
1114 moves[oldnode] = newnode
1114 moves[oldnode] = newnode
1115
1115
1116 allnewnodes = [n for ns in replacements.values() for n in ns]
1116 allnewnodes = [n for ns in replacements.values() for n in ns]
1117 toretract = {}
1117 toretract = {}
1118 toadvance = {}
1118 toadvance = {}
1119 if fixphase:
1119 if fixphase:
1120 precursors = {}
1120 precursors = {}
1121 for oldnodes, newnodes in replacements.items():
1121 for oldnodes, newnodes in replacements.items():
1122 for oldnode in oldnodes:
1122 for oldnode in oldnodes:
1123 for newnode in newnodes:
1123 for newnode in newnodes:
1124 precursors.setdefault(newnode, []).append(oldnode)
1124 precursors.setdefault(newnode, []).append(oldnode)
1125
1125
1126 allnewnodes.sort(key=lambda n: unfi[n].rev())
1126 allnewnodes.sort(key=lambda n: unfi[n].rev())
1127 newphases = {}
1127 newphases = {}
1128
1128
1129 def phase(ctx):
1129 def phase(ctx):
1130 return newphases.get(ctx.node(), ctx.phase())
1130 return newphases.get(ctx.node(), ctx.phase())
1131
1131
1132 for newnode in allnewnodes:
1132 for newnode in allnewnodes:
1133 ctx = unfi[newnode]
1133 ctx = unfi[newnode]
1134 parentphase = max(phase(p) for p in ctx.parents())
1134 parentphase = max(phase(p) for p in ctx.parents())
1135 if targetphase is None:
1135 if targetphase is None:
1136 oldphase = max(
1136 oldphase = max(
1137 unfi[oldnode].phase() for oldnode in precursors[newnode]
1137 unfi[oldnode].phase() for oldnode in precursors[newnode]
1138 )
1138 )
1139 newphase = max(oldphase, parentphase)
1139 newphase = max(oldphase, parentphase)
1140 else:
1140 else:
1141 newphase = max(targetphase, parentphase)
1141 newphase = max(targetphase, parentphase)
1142 newphases[newnode] = newphase
1142 newphases[newnode] = newphase
1143 if newphase > ctx.phase():
1143 if newphase > ctx.phase():
1144 toretract.setdefault(newphase, []).append(newnode)
1144 toretract.setdefault(newphase, []).append(newnode)
1145 elif newphase < ctx.phase():
1145 elif newphase < ctx.phase():
1146 toadvance.setdefault(newphase, []).append(newnode)
1146 toadvance.setdefault(newphase, []).append(newnode)
1147
1147
1148 with repo.transaction(b'cleanup') as tr:
1148 with repo.transaction(b'cleanup') as tr:
1149 # Move bookmarks
1149 # Move bookmarks
1150 bmarks = repo._bookmarks
1150 bmarks = repo._bookmarks
1151 bmarkchanges = []
1151 bmarkchanges = []
1152 for oldnode, newnode in moves.items():
1152 for oldnode, newnode in moves.items():
1153 oldbmarks = repo.nodebookmarks(oldnode)
1153 oldbmarks = repo.nodebookmarks(oldnode)
1154 if not oldbmarks:
1154 if not oldbmarks:
1155 continue
1155 continue
1156 from . import bookmarks # avoid import cycle
1156 from . import bookmarks # avoid import cycle
1157
1157
1158 repo.ui.debug(
1158 repo.ui.debug(
1159 b'moving bookmarks %r from %s to %s\n'
1159 b'moving bookmarks %r from %s to %s\n'
1160 % (
1160 % (
1161 pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1161 pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1162 hex(oldnode),
1162 hex(oldnode),
1163 hex(newnode),
1163 hex(newnode),
1164 )
1164 )
1165 )
1165 )
1166 # Delete divergent bookmarks being parents of related newnodes
1166 # Delete divergent bookmarks being parents of related newnodes
1167 deleterevs = repo.revs(
1167 deleterevs = repo.revs(
1168 b'parents(roots(%ln & (::%n))) - parents(%n)',
1168 b'parents(roots(%ln & (::%n))) - parents(%n)',
1169 allnewnodes,
1169 allnewnodes,
1170 newnode,
1170 newnode,
1171 oldnode,
1171 oldnode,
1172 )
1172 )
1173 deletenodes = _containsnode(repo, deleterevs)
1173 deletenodes = _containsnode(repo, deleterevs)
1174 for name in oldbmarks:
1174 for name in oldbmarks:
1175 bmarkchanges.append((name, newnode))
1175 bmarkchanges.append((name, newnode))
1176 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1176 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1177 bmarkchanges.append((b, None))
1177 bmarkchanges.append((b, None))
1178
1178
1179 if bmarkchanges:
1179 if bmarkchanges:
1180 bmarks.applychanges(repo, tr, bmarkchanges)
1180 bmarks.applychanges(repo, tr, bmarkchanges)
1181
1181
1182 for phase, nodes in toretract.items():
1182 for phase, nodes in toretract.items():
1183 phases.retractboundary(repo, tr, phase, nodes)
1183 phases.retractboundary(repo, tr, phase, nodes)
1184 for phase, nodes in toadvance.items():
1184 for phase, nodes in toadvance.items():
1185 phases.advanceboundary(repo, tr, phase, nodes)
1185 phases.advanceboundary(repo, tr, phase, nodes)
1186
1186
1187 mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
1187 mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
1188 # Obsolete or strip nodes
1188 # Obsolete or strip nodes
1189 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1189 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1190 # If a node is already obsoleted, and we want to obsolete it
1190 # If a node is already obsoleted, and we want to obsolete it
1191 # without a successor, skip that obsolete request since it's
1191 # without a successor, skip that obsolete request since it's
1192 # unnecessary. That's the "if s or not isobs(n)" check below.
1192 # unnecessary. That's the "if s or not isobs(n)" check below.
1193 # Also sort the nodes in topological order; that might be useful for
1193 # Also sort the nodes in topological order; that might be useful for
1194 # some obsstore logic.
1194 # some obsstore logic.
1195 # NOTE: the sorting might belong to createmarkers.
1195 # NOTE: the sorting might belong to createmarkers.
1196 torev = unfi.changelog.rev
1196 torev = unfi.changelog.rev
1197 sortfunc = lambda ns: torev(ns[0][0])
1197 sortfunc = lambda ns: torev(ns[0][0])
1198 rels = []
1198 rels = []
1199 for ns, s in sorted(replacements.items(), key=sortfunc):
1199 for ns, s in sorted(replacements.items(), key=sortfunc):
1200 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1200 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1201 rels.append(rel)
1201 rels.append(rel)
1202 if rels:
1202 if rels:
1203 obsolete.createmarkers(
1203 obsolete.createmarkers(
1204 repo, rels, operation=operation, metadata=metadata
1204 repo, rels, operation=operation, metadata=metadata
1205 )
1205 )
1206 elif phases.supportinternal(repo) and mayusearchived:
1206 elif phases.supportinternal(repo) and mayusearchived:
1207 # this assumes we do not have "unstable" nodes above the cleaned ones
1207 # this assumes we do not have "unstable" nodes above the cleaned ones
1208 allreplaced = set()
1208 allreplaced = set()
1209 for ns in replacements.keys():
1209 for ns in replacements.keys():
1210 allreplaced.update(ns)
1210 allreplaced.update(ns)
1211 if backup:
1211 if backup:
1212 from . import repair # avoid import cycle
1212 from . import repair # avoid import cycle
1213
1213
1214 node = min(allreplaced, key=repo.changelog.rev)
1214 node = min(allreplaced, key=repo.changelog.rev)
1215 repair.backupbundle(
1215 repair.backupbundle(
1216 repo, allreplaced, allreplaced, node, operation
1216 repo, allreplaced, allreplaced, node, operation
1217 )
1217 )
1218 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1218 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1219 else:
1219 else:
1220 from . import repair # avoid import cycle
1220 from . import repair # avoid import cycle
1221
1221
1222 tostrip = list(n for ns in replacements for n in ns)
1222 tostrip = list(n for ns in replacements for n in ns)
1223 if tostrip:
1223 if tostrip:
1224 repair.delayedstrip(
1224 repair.delayedstrip(
1225 repo.ui, repo, tostrip, operation, backup=backup
1225 repo.ui, repo, tostrip, operation, backup=backup
1226 )
1226 )
1227
1227
1228
1228
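# Illustrative sketch (not part of scmutil.py): the two input shapes that
# cleanupnodes() accepts for 'replacements' and the canonical tuple-keyed
# form it normalizes them into. Plain strings stand in for node ids, and
# hasattr() stands in for util.safehasattr().
def _normalize_replacements_demo(replacements):
    if not hasattr(replacements, 'items'):
        # a bare iterable of nodes: each node is dropped without a successor
        return {(n,): () for n in replacements}
    # mapping form: promote plain keys to 1-tuples for backward compatibility
    return {
        (k if isinstance(k, tuple) else (k,)): v
        for k, v in replacements.items()
    }

# _normalize_replacements_demo(['n1', 'n2'])       -> {('n1',): (), ('n2',): ()}
# _normalize_replacements_demo({'old': ('new',)})  -> {('old',): ('new',)}
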
1229 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1229 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1230 if opts is None:
1230 if opts is None:
1231 opts = {}
1231 opts = {}
1232 m = matcher
1232 m = matcher
1233 dry_run = opts.get(b'dry_run')
1233 dry_run = opts.get(b'dry_run')
1234 try:
1234 try:
1235 similarity = float(opts.get(b'similarity') or 0)
1235 similarity = float(opts.get(b'similarity') or 0)
1236 except ValueError:
1236 except ValueError:
1237 raise error.Abort(_(b'similarity must be a number'))
1237 raise error.Abort(_(b'similarity must be a number'))
1238 if similarity < 0 or similarity > 100:
1238 if similarity < 0 or similarity > 100:
1239 raise error.Abort(_(b'similarity must be between 0 and 100'))
1239 raise error.Abort(_(b'similarity must be between 0 and 100'))
1240 similarity /= 100.0
1240 similarity /= 100.0
1241
1241
1242 ret = 0
1242 ret = 0
1243
1243
1244 wctx = repo[None]
1244 wctx = repo[None]
1245 for subpath in sorted(wctx.substate):
1245 for subpath in sorted(wctx.substate):
1246 submatch = matchmod.subdirmatcher(subpath, m)
1246 submatch = matchmod.subdirmatcher(subpath, m)
1247 if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
1247 if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
1248 sub = wctx.sub(subpath)
1248 sub = wctx.sub(subpath)
1249 subprefix = repo.wvfs.reljoin(prefix, subpath)
1249 subprefix = repo.wvfs.reljoin(prefix, subpath)
1250 subuipathfn = subdiruipathfn(subpath, uipathfn)
1250 subuipathfn = subdiruipathfn(subpath, uipathfn)
1251 try:
1251 try:
1252 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1252 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1253 ret = 1
1253 ret = 1
1254 except error.LookupError:
1254 except error.LookupError:
1255 repo.ui.status(
1255 repo.ui.status(
1256 _(b"skipping missing subrepository: %s\n")
1256 _(b"skipping missing subrepository: %s\n")
1257 % uipathfn(subpath)
1257 % uipathfn(subpath)
1258 )
1258 )
1259
1259
1260 rejected = []
1260 rejected = []
1261
1261
1262 def badfn(f, msg):
1262 def badfn(f, msg):
1263 if f in m.files():
1263 if f in m.files():
1264 m.bad(f, msg)
1264 m.bad(f, msg)
1265 rejected.append(f)
1265 rejected.append(f)
1266
1266
1267 badmatch = matchmod.badmatch(m, badfn)
1267 badmatch = matchmod.badmatch(m, badfn)
1268 added, unknown, deleted, removed, forgotten = _interestingfiles(
1268 added, unknown, deleted, removed, forgotten = _interestingfiles(
1269 repo, badmatch
1269 repo, badmatch
1270 )
1270 )
1271
1271
1272 unknownset = set(unknown + forgotten)
1272 unknownset = set(unknown + forgotten)
1273 toprint = unknownset.copy()
1273 toprint = unknownset.copy()
1274 toprint.update(deleted)
1274 toprint.update(deleted)
1275 for abs in sorted(toprint):
1275 for abs in sorted(toprint):
1276 if repo.ui.verbose or not m.exact(abs):
1276 if repo.ui.verbose or not m.exact(abs):
1277 if abs in unknownset:
1277 if abs in unknownset:
1278 status = _(b'adding %s\n') % uipathfn(abs)
1278 status = _(b'adding %s\n') % uipathfn(abs)
1279 label = b'ui.addremove.added'
1279 label = b'ui.addremove.added'
1280 else:
1280 else:
1281 status = _(b'removing %s\n') % uipathfn(abs)
1281 status = _(b'removing %s\n') % uipathfn(abs)
1282 label = b'ui.addremove.removed'
1282 label = b'ui.addremove.removed'
1283 repo.ui.status(status, label=label)
1283 repo.ui.status(status, label=label)
1284
1284
1285 renames = _findrenames(
1285 renames = _findrenames(
1286 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1286 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1287 )
1287 )
1288
1288
1289 if not dry_run:
1289 if not dry_run:
1290 _markchanges(repo, unknown + forgotten, deleted, renames)
1290 _markchanges(repo, unknown + forgotten, deleted, renames)
1291
1291
1292 for f in rejected:
1292 for f in rejected:
1293 if f in m.files():
1293 if f in m.files():
1294 return 1
1294 return 1
1295 return ret
1295 return ret
1296
1296
1297
1297
1298 def marktouched(repo, files, similarity=0.0):
1298 def marktouched(repo, files, similarity=0.0):
1299 '''Assert that files have somehow been operated upon. files are relative to
1299 '''Assert that files have somehow been operated upon. files are relative to
1300 the repo root.'''
1300 the repo root.'''
1301 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1301 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1302 rejected = []
1302 rejected = []
1303
1303
1304 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1304 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1305
1305
1306 if repo.ui.verbose:
1306 if repo.ui.verbose:
1307 unknownset = set(unknown + forgotten)
1307 unknownset = set(unknown + forgotten)
1308 toprint = unknownset.copy()
1308 toprint = unknownset.copy()
1309 toprint.update(deleted)
1309 toprint.update(deleted)
1310 for abs in sorted(toprint):
1310 for abs in sorted(toprint):
1311 if abs in unknownset:
1311 if abs in unknownset:
1312 status = _(b'adding %s\n') % abs
1312 status = _(b'adding %s\n') % abs
1313 else:
1313 else:
1314 status = _(b'removing %s\n') % abs
1314 status = _(b'removing %s\n') % abs
1315 repo.ui.status(status)
1315 repo.ui.status(status)
1316
1316
1317 # TODO: We should probably have the caller pass in uipathfn and apply it to
1317 # TODO: We should probably have the caller pass in uipathfn and apply it to
1318 # the messages above too. legacyrelativevalue=True is consistent with how
1318 # the messages above too. legacyrelativevalue=True is consistent with how
1319 # it used to work.
1319 # it used to work.
1320 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1320 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1321 renames = _findrenames(
1321 renames = _findrenames(
1322 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1322 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1323 )
1323 )
1324
1324
1325 _markchanges(repo, unknown + forgotten, deleted, renames)
1325 _markchanges(repo, unknown + forgotten, deleted, renames)
1326
1326
1327 for f in rejected:
1327 for f in rejected:
1328 if f in m.files():
1328 if f in m.files():
1329 return 1
1329 return 1
1330 return 0
1330 return 0
1331
1331
1332
1332
1333 def _interestingfiles(repo, matcher):
1333 def _interestingfiles(repo, matcher):
1334 '''Walk dirstate with matcher, looking for files that addremove would care
1334 '''Walk dirstate with matcher, looking for files that addremove would care
1335 about.
1335 about.
1336
1336
1337 This is different from dirstate.status because it doesn't care about
1337 This is different from dirstate.status because it doesn't care about
1338 whether files are modified or clean.'''
1338 whether files are modified or clean.'''
1339 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1339 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1340 audit_path = pathutil.pathauditor(repo.root, cached=True)
1340 audit_path = pathutil.pathauditor(repo.root, cached=True)
1341
1341
1342 ctx = repo[None]
1342 ctx = repo[None]
1343 dirstate = repo.dirstate
1343 dirstate = repo.dirstate
1344 matcher = repo.narrowmatch(matcher, includeexact=True)
1344 matcher = repo.narrowmatch(matcher, includeexact=True)
1345 walkresults = dirstate.walk(
1345 walkresults = dirstate.walk(
1346 matcher,
1346 matcher,
1347 subrepos=sorted(ctx.substate),
1347 subrepos=sorted(ctx.substate),
1348 unknown=True,
1348 unknown=True,
1349 ignored=False,
1349 ignored=False,
1350 full=False,
1350 full=False,
1351 )
1351 )
1352 for abs, st in pycompat.iteritems(walkresults):
1352 for abs, st in pycompat.iteritems(walkresults):
1353 dstate = dirstate[abs]
1353 dstate = dirstate[abs]
1354 if dstate == b'?' and audit_path.check(abs):
1354 if dstate == b'?' and audit_path.check(abs):
1355 unknown.append(abs)
1355 unknown.append(abs)
1356 elif dstate != b'r' and not st:
1356 elif dstate != b'r' and not st:
1357 deleted.append(abs)
1357 deleted.append(abs)
1358 elif dstate == b'r' and st:
1358 elif dstate == b'r' and st:
1359 forgotten.append(abs)
1359 forgotten.append(abs)
1360 # for finding renames
1360 # for finding renames
1361 elif dstate == b'r' and not st:
1361 elif dstate == b'r' and not st:
1362 removed.append(abs)
1362 removed.append(abs)
1363 elif dstate == b'a':
1363 elif dstate == b'a':
1364 added.append(abs)
1364 added.append(abs)
1365
1365
1366 return added, unknown, deleted, removed, forgotten
1366 return added, unknown, deleted, removed, forgotten
1367
1367
1368
1368
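# Illustrative sketch (not part of scmutil.py): the classification performed
# by the loop in _interestingfiles() above, reduced to a pure function over
# a dirstate state character and whether the file still exists on disk. The
# real code additionally consults the path auditor for unknown files.
def _classify_demo(dstate, exists_on_disk):
    if dstate == '?' and exists_on_disk:
        return 'unknown'
    elif dstate != 'r' and not exists_on_disk:
        return 'deleted'
    elif dstate == 'r' and exists_on_disk:
        return 'forgotten'
    elif dstate == 'r' and not exists_on_disk:
        return 'removed'  # candidate rename source for _findrenames()
    elif dstate == 'a':
        return 'added'
    return None  # tracked and present: not interesting to addremove

# _classify_demo('?', True)   -> 'unknown'
# _classify_demo('n', False)  -> 'deleted'
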
1369 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1369 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1370 '''Find renames from removed files to added ones.'''
1370 '''Find renames from removed files to added ones.'''
1371 renames = {}
1371 renames = {}
1372 if similarity > 0:
1372 if similarity > 0:
1373 for old, new, score in similar.findrenames(
1373 for old, new, score in similar.findrenames(
1374 repo, added, removed, similarity
1374 repo, added, removed, similarity
1375 ):
1375 ):
1376 if (
1376 if (
1377 repo.ui.verbose
1377 repo.ui.verbose
1378 or not matcher.exact(old)
1378 or not matcher.exact(old)
1379 or not matcher.exact(new)
1379 or not matcher.exact(new)
1380 ):
1380 ):
1381 repo.ui.status(
1381 repo.ui.status(
1382 _(
1382 _(
1383 b'recording removal of %s as rename to %s '
1383 b'recording removal of %s as rename to %s '
1384 b'(%d%% similar)\n'
1384 b'(%d%% similar)\n'
1385 )
1385 )
1386 % (uipathfn(old), uipathfn(new), score * 100)
1386 % (uipathfn(old), uipathfn(new), score * 100)
1387 )
1387 )
1388 renames[new] = old
1388 renames[new] = old
1389 return renames
1389 return renames
1390
1390
1391
1391
1392 def _markchanges(repo, unknown, deleted, renames):
1392 def _markchanges(repo, unknown, deleted, renames):
1393 '''Marks the files in unknown as added, the files in deleted as removed,
1393 '''Marks the files in unknown as added, the files in deleted as removed,
1394 and the files in renames as copied.'''
1394 and the files in renames as copied.'''
1395 wctx = repo[None]
1395 wctx = repo[None]
1396 with repo.wlock():
1396 with repo.wlock():
1397 wctx.forget(deleted)
1397 wctx.forget(deleted)
1398 wctx.add(unknown)
1398 wctx.add(unknown)
1399 for new, old in pycompat.iteritems(renames):
1399 for new, old in pycompat.iteritems(renames):
1400 wctx.copy(old, new)
1400 wctx.copy(old, new)
1401
1401
1402
1402
1403 def getrenamedfn(repo, endrev=None):
1403 def getrenamedfn(repo, endrev=None):
1404 if copiesmod.usechangesetcentricalgo(repo):
1404 if copiesmod.usechangesetcentricalgo(repo):
1405
1405
1406 def getrenamed(fn, rev):
1406 def getrenamed(fn, rev):
1407 ctx = repo[rev]
1407 ctx = repo[rev]
1408 p1copies = ctx.p1copies()
1408 p1copies = ctx.p1copies()
1409 if fn in p1copies:
1409 if fn in p1copies:
1410 return p1copies[fn]
1410 return p1copies[fn]
1411 p2copies = ctx.p2copies()
1411 p2copies = ctx.p2copies()
1412 if fn in p2copies:
1412 if fn in p2copies:
1413 return p2copies[fn]
1413 return p2copies[fn]
1414 return None
1414 return None
1415
1415
1416 return getrenamed
1416 return getrenamed
1417
1417
1418 rcache = {}
1418 rcache = {}
1419 if endrev is None:
1419 if endrev is None:
1420 endrev = len(repo)
1420 endrev = len(repo)
1421
1421
1422 def getrenamed(fn, rev):
1422 def getrenamed(fn, rev):
1423 '''looks up all renames for a file (up to endrev) the first
1423 '''looks up all renames for a file (up to endrev) the first
1424 time the file is given. It indexes on the changerev and only
1424 time the file is given. It indexes on the changerev and only
1425 parses the manifest if linkrev != changerev.
1425 parses the manifest if linkrev != changerev.
1426 Returns rename info for fn at changerev rev.'''
1426 Returns rename info for fn at changerev rev.'''
1427 if fn not in rcache:
1427 if fn not in rcache:
1428 rcache[fn] = {}
1428 rcache[fn] = {}
1429 fl = repo.file(fn)
1429 fl = repo.file(fn)
1430 for i in fl:
1430 for i in fl:
1431 lr = fl.linkrev(i)
1431 lr = fl.linkrev(i)
1432 renamed = fl.renamed(fl.node(i))
1432 renamed = fl.renamed(fl.node(i))
1433 rcache[fn][lr] = renamed and renamed[0]
1433 rcache[fn][lr] = renamed and renamed[0]
1434 if lr >= endrev:
1434 if lr >= endrev:
1435 break
1435 break
1436 if rev in rcache[fn]:
1436 if rev in rcache[fn]:
1437 return rcache[fn][rev]
1437 return rcache[fn][rev]
1438
1438
1439 # If linkrev != rev (i.e. rev not found in rcache), fall back to
1439 # If linkrev != rev (i.e. rev not found in rcache), fall back to
1440 # filectx logic.
1440 # filectx logic.
1441 try:
1441 try:
1442 return repo[rev][fn].copysource()
1442 return repo[rev][fn].copysource()
1443 except error.LookupError:
1443 except error.LookupError:
1444 return None
1444 return None
1445
1445
1446 return getrenamed
1446 return getrenamed
1447
1447
1448
1448
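# Illustrative sketch (not part of scmutil.py): the per-file memoization
# pattern used by the filelog-based getrenamed() above, with a plain dict
# standing in for the filelog. Hypothetical data: revision 2 of 'b.txt'
# records that it was renamed from 'a.txt'.
_filelog_demo = {b'b.txt': {2: b'a.txt'}}  # fn -> {linkrev: rename source}

def _getrenamed_demo(rcache, fn, rev):
    if fn not in rcache:
        # first lookup for this file: index every known linkrev at once
        rcache[fn] = dict(_filelog_demo.get(fn, {}))
    return rcache[fn].get(rev)

# cache = {}
# _getrenamed_demo(cache, b'b.txt', 2)  -> b'a.txt'
# _getrenamed_demo(cache, b'b.txt', 3)  -> None (no rename recorded there)
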
1449 def getcopiesfn(repo, endrev=None):
1449 def getcopiesfn(repo, endrev=None):
1450 if copiesmod.usechangesetcentricalgo(repo):
1450 if copiesmod.usechangesetcentricalgo(repo):
1451
1451
1452 def copiesfn(ctx):
1452 def copiesfn(ctx):
1453 if ctx.p2copies():
1453 if ctx.p2copies():
1454 allcopies = ctx.p1copies().copy()
1454 allcopies = ctx.p1copies().copy()
1455 # There should be no overlap
1455 # There should be no overlap
1456 allcopies.update(ctx.p2copies())
1456 allcopies.update(ctx.p2copies())
1457 return sorted(allcopies.items())
1457 return sorted(allcopies.items())
1458 else:
1458 else:
1459 return sorted(ctx.p1copies().items())
1459 return sorted(ctx.p1copies().items())
1460
1460
1461 else:
1461 else:
1462 getrenamed = getrenamedfn(repo, endrev)
1462 getrenamed = getrenamedfn(repo, endrev)
1463
1463
1464 def copiesfn(ctx):
1464 def copiesfn(ctx):
1465 copies = []
1465 copies = []
1466 for fn in ctx.files():
1466 for fn in ctx.files():
1467 rename = getrenamed(fn, ctx.rev())
1467 rename = getrenamed(fn, ctx.rev())
1468 if rename:
1468 if rename:
1469 copies.append((fn, rename))
1469 copies.append((fn, rename))
1470 return copies
1470 return copies
1471
1471
1472 return copiesfn
1472 return copiesfn
1473
1473
1474
1474
1475 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1475 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1476 """Update the dirstate to reflect the intent of copying src to dst. For
1476 """Update the dirstate to reflect the intent of copying src to dst. For
1477 different reasons, it might not end up with dst being marked as copied from src.
1477 different reasons, it might not end up with dst being marked as copied from src.
1478 """
1478 """
1479 origsrc = repo.dirstate.copied(src) or src
1479 origsrc = repo.dirstate.copied(src) or src
1480 if dst == origsrc: # copying back a copy?
1480 if dst == origsrc: # copying back a copy?
1481 if repo.dirstate[dst] not in b'mn' and not dryrun:
1481 if repo.dirstate[dst] not in b'mn' and not dryrun:
1482 repo.dirstate.normallookup(dst)
1482 repo.dirstate.normallookup(dst)
1483 else:
1483 else:
1484 if repo.dirstate[origsrc] == b'a' and origsrc == src:
1484 if repo.dirstate[origsrc] == b'a' and origsrc == src:
1485 if not ui.quiet:
1485 if not ui.quiet:
1486 ui.warn(
1486 ui.warn(
1487 _(
1487 _(
1488 b"%s has not been committed yet, so no copy "
1488 b"%s has not been committed yet, so no copy "
1489 b"data will be stored for %s.\n"
1489 b"data will be stored for %s.\n"
1490 )
1490 )
1491 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
1491 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
1492 )
1492 )
1493 if repo.dirstate[dst] in b'?r' and not dryrun:
1493 if repo.dirstate[dst] in b'?r' and not dryrun:
1494 wctx.add([dst])
1494 wctx.add([dst])
1495 elif not dryrun:
1495 elif not dryrun:
1496 wctx.copy(origsrc, dst)
1496 wctx.copy(origsrc, dst)
1497
1497
1498
1498
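# Illustrative sketch (not part of scmutil.py): the decision made by
# dirstatecopy() above, expressed as a pure function over dirstate state
# characters ('a' added, 'r' removed, 'n' normal, 'm' merged, '?' unknown).
# The dryrun and quiet handling of the real code is omitted here.
def _copy_action_demo(origsrc, src, dst, dststate, origsrcstate):
    if dst == origsrc:
        # copying a copy back onto its source: just normalize dst's state
        return 'normallookup' if dststate not in 'mn' else 'noop'
    if origsrcstate == 'a' and origsrc == src:
        # source was added but never committed: no copy data can be stored
        return 'add' if dststate in '?r' else 'noop'
    return 'copy'  # record dst as copied from origsrc

# _copy_action_demo('a.txt', 'a.txt', 'a.txt', 'r', 'n')  -> 'normallookup'
# _copy_action_demo('a.txt', 'a.txt', 'b.txt', '?', 'n')  -> 'copy'
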
1499 def movedirstate(repo, newctx, match=None):
1499 def movedirstate(repo, newctx, match=None):
1500 """Move the dirstate to newctx and adjust it as necessary.
1500 """Move the dirstate to newctx and adjust it as necessary.
1501
1501
1502 A matcher can be provided as an optimization. It is probably a bug to pass
1502 A matcher can be provided as an optimization. It is probably a bug to pass
1503 a matcher that doesn't match all the differences between the parent of the
1503 a matcher that doesn't match all the differences between the parent of the
1504 working copy and newctx.
1504 working copy and newctx.
1505 """
1505 """
1506 oldctx = repo[b'.']
1506 oldctx = repo[b'.']
1507 ds = repo.dirstate
1507 ds = repo.dirstate
1508 copies = dict(ds.copies())
1508 copies = dict(ds.copies())
1509 ds.setparents(newctx.node(), nullid)
1509 ds.setparents(newctx.node(), nullid)
1510 s = newctx.status(oldctx, match=match)
1510 s = newctx.status(oldctx, match=match)
1511 for f in s.modified:
1511 for f in s.modified:
1512 if ds[f] == b'r':
1512 if ds[f] == b'r':
1513 # modified + removed -> removed
1513 # modified + removed -> removed
1514 continue
1514 continue
1515 ds.normallookup(f)
1515 ds.normallookup(f)
1516
1516
1517 for f in s.added:
1517 for f in s.added:
1518 if ds[f] == b'r':
1518 if ds[f] == b'r':
1519 # added + removed -> unknown
1519 # added + removed -> unknown
1520 ds.drop(f)
1520 ds.drop(f)
1521 elif ds[f] != b'a':
1521 elif ds[f] != b'a':
1522 ds.add(f)
1522 ds.add(f)
1523
1523
1524 for f in s.removed:
1524 for f in s.removed:
1525 if ds[f] == b'a':
1525 if ds[f] == b'a':
1526 # removed + added -> normal
1526 # removed + added -> normal
1527 ds.normallookup(f)
1527 ds.normallookup(f)
1528 elif ds[f] != b'r':
1528 elif ds[f] != b'r':
1529 ds.remove(f)
1529 ds.remove(f)
1530
1530
1531 # Merge old parent and old working dir copies
1531 # Merge old parent and old working dir copies
1532 oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
1532 oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
1533 oldcopies.update(copies)
1533 oldcopies.update(copies)
1534 copies = {
1534 copies = {
1535 dst: oldcopies.get(src, src)
1535 dst: oldcopies.get(src, src)
1536 for dst, src in pycompat.iteritems(oldcopies)
1536 for dst, src in pycompat.iteritems(oldcopies)
1537 }
1537 }
1538 # Adjust the dirstate copies
1538 # Adjust the dirstate copies
1539 for dst, src in pycompat.iteritems(copies):
1539 for dst, src in pycompat.iteritems(copies):
1540 if src not in newctx or dst in newctx or ds[dst] != b'a':
1540 if src not in newctx or dst in newctx or ds[dst] != b'a':
1541 src = None
1541 src = None
1542 ds.copy(src, dst)
1542 ds.copy(src, dst)
1543 repo._quick_access_changeid_invalidate()
1543 repo._quick_access_changeid_invalidate()
1544
1544
1545
1545
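# Illustrative sketch (not part of scmutil.py): the copy-chaining step in
# movedirstate() above. Copies recorded against the old parent and copies
# recorded in the working directory are merged, then each destination is
# pointed at its ultimate source. The file names are hypothetical.
oldcopies_demo = {b'b.txt': b'a.txt'}   # old parent said b came from a
wdircopies_demo = {b'c.txt': b'b.txt'}  # working dir said c came from b
oldcopies_demo.update(wdircopies_demo)
chained_demo = {
    dst: oldcopies_demo.get(src, src)
    for dst, src in oldcopies_demo.items()
}
# chained_demo == {b'b.txt': b'a.txt', b'c.txt': b'a.txt'}
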
1546 def filterrequirements(requirements):
1546 def filterrequirements(requirements):
1547 """ filters the requirements into two sets:
1547 """ filters the requirements into two sets:
1548
1548
1549 wcreq: requirements which should be written in .hg/requires
1549 wcreq: requirements which should be written in .hg/requires
1550 storereq: which should be written in .hg/store/requires
1550 storereq: which should be written in .hg/store/requires
1551
1551
1552 Returns (wcreq, storereq)
1552 Returns (wcreq, storereq)
1553 """
1553 """
1554 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
1554 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
1555 wc, store = set(), set()
1555 wc, store = set(), set()
1556 for r in requirements:
1556 for r in requirements:
1557 if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
1557 if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
1558 wc.add(r)
1558 wc.add(r)
1559 else:
1559 else:
1560 store.add(r)
1560 store.add(r)
1561 return wc, store
1561 return wc, store
1562 return requirements, None
1562 return requirements, None
1563
1563
1564
1564
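# Illustrative sketch (not part of scmutil.py): the split performed by
# filterrequirements() when the share-safe requirement is present, using
# hypothetical requirement names and a hypothetical working-directory
# whitelist instead of the constants from the requirements module.
_WDIR_REQS_DEMO = {'sparse', 'shared'}  # stand-in for WORKING_DIR_REQUIREMENTS

def _filterrequirements_demo(requirements, share_safe=True):
    if not share_safe:
        return set(requirements), None
    wc = {r for r in requirements if r in _WDIR_REQS_DEMO}
    return wc, set(requirements) - wc

# _filterrequirements_demo({'revlogv1', 'sparse'})  -> ({'sparse'}, {'revlogv1'})
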
1565 def istreemanifest(repo):
1565 def istreemanifest(repo):
1566 """ returns whether the repository is using treemanifest or not """
1566 """ returns whether the repository is using treemanifest or not """
1567 return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements
1567 return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements
1568
1568
1569
1569
1570 def writereporequirements(repo, requirements=None):
1570 def writereporequirements(repo, requirements=None):
1571 """ writes requirements for the repo to .hg/requires """
1571 """ writes requirements for the repo to .hg/requires """
1572 if requirements:
1572 if requirements:
1573 repo.requirements = requirements
1573 repo.requirements = requirements
1574 wcreq, storereq = filterrequirements(repo.requirements)
1574 wcreq, storereq = filterrequirements(repo.requirements)
1575 if wcreq is not None:
1575 if wcreq is not None:
1576 writerequires(repo.vfs, wcreq)
1576 writerequires(repo.vfs, wcreq)
1577 if storereq is not None:
1577 if storereq is not None:
1578 writerequires(repo.svfs, storereq)
1578 writerequires(repo.svfs, storereq)
1579
1579
1580
1580
1581 def writerequires(opener, requirements):
1581 def writerequires(opener, requirements):
1582 with opener(b'requires', b'w', atomictemp=True) as fp:
1582 with opener(b'requires', b'w', atomictemp=True) as fp:
1583 for r in sorted(requirements):
1583 for r in sorted(requirements):
1584 fp.write(b"%s\n" % r)
1584 fp.write(b"%s\n" % r)
1585
1585
1586
1586
1587 class filecachesubentry(object):
1587 class filecachesubentry(object):
1588 def __init__(self, path, stat):
1588 def __init__(self, path, stat):
1589 self.path = path
1589 self.path = path
1590 self.cachestat = None
1590 self.cachestat = None
1591 self._cacheable = None
1591 self._cacheable = None
1592
1592
1593 if stat:
1593 if stat:
1594 self.cachestat = filecachesubentry.stat(self.path)
1594 self.cachestat = filecachesubentry.stat(self.path)
1595
1595
1596 if self.cachestat:
1596 if self.cachestat:
1597 self._cacheable = self.cachestat.cacheable()
1597 self._cacheable = self.cachestat.cacheable()
1598 else:
1598 else:
1599 # None means we don't know yet
1599 # None means we don't know yet
1600 self._cacheable = None
1600 self._cacheable = None
1601
1601
1602 def refresh(self):
1602 def refresh(self):
1603 if self.cacheable():
1603 if self.cacheable():
1604 self.cachestat = filecachesubentry.stat(self.path)
1604 self.cachestat = filecachesubentry.stat(self.path)
1605
1605
1606 def cacheable(self):
1606 def cacheable(self):
1607 if self._cacheable is not None:
1607 if self._cacheable is not None:
1608 return self._cacheable
1608 return self._cacheable
1609
1609
1610 # we don't know yet, assume it is for now
1610 # we don't know yet, assume it is for now
1611 return True
1611 return True
1612
1612
1613 def changed(self):
1613 def changed(self):
1614 # no point in going further if we can't cache it
1614 # no point in going further if we can't cache it
1615 if not self.cacheable():
1615 if not self.cacheable():
1616 return True
1616 return True
1617
1617
1618 newstat = filecachesubentry.stat(self.path)
1618 newstat = filecachesubentry.stat(self.path)
1619
1619
1620 # we may not know if it's cacheable yet, check again now
1620 # we may not know if it's cacheable yet, check again now
1621 if newstat and self._cacheable is None:
1621 if newstat and self._cacheable is None:
1622 self._cacheable = newstat.cacheable()
1622 self._cacheable = newstat.cacheable()
1623
1623
1624 # check again
1624 # check again
1625 if not self._cacheable:
1625 if not self._cacheable:
1626 return True
1626 return True
1627
1627
1628 if self.cachestat != newstat:
1628 if self.cachestat != newstat:
1629 self.cachestat = newstat
1629 self.cachestat = newstat
1630 return True
1630 return True
1631 else:
1631 else:
1632 return False
1632 return False
1633
1633
1634 @staticmethod
1634 @staticmethod
1635 def stat(path):
1635 def stat(path):
1636 try:
1636 try:
1637 return util.cachestat(path)
1637 return util.cachestat(path)
1638 except OSError as e:
1638 except OSError as e:
1639 if e.errno != errno.ENOENT:
1639 if e.errno != errno.ENOENT:
1640 raise
1640 raise
1641
1641
1642
1642
1643 class filecacheentry(object):
1643 class filecacheentry(object):
1644 def __init__(self, paths, stat=True):
1644 def __init__(self, paths, stat=True):
1645 self._entries = []
1645 self._entries = []
1646 for path in paths:
1646 for path in paths:
1647 self._entries.append(filecachesubentry(path, stat))
1647 self._entries.append(filecachesubentry(path, stat))
1648
1648
1649 def changed(self):
1649 def changed(self):
1650 '''true if any entry has changed'''
1650 '''true if any entry has changed'''
1651 for entry in self._entries:
1651 for entry in self._entries:
1652 if entry.changed():
1652 if entry.changed():
1653 return True
1653 return True
1654 return False
1654 return False
1655
1655
1656 def refresh(self):
1656 def refresh(self):
1657 for entry in self._entries:
1657 for entry in self._entries:
1658 entry.refresh()
1658 entry.refresh()
1659
1659
1660
1660
1661 class filecache(object):
1661 class filecache(object):
1662 """A property like decorator that tracks files under .hg/ for updates.
1662 """A property like decorator that tracks files under .hg/ for updates.
1663
1663
1664 On first access, the files defined as arguments are stat()ed and the
1664 On first access, the files defined as arguments are stat()ed and the
1665 results cached. The decorated function is called. The results are stashed
1665 results cached. The decorated function is called. The results are stashed
1666 away in a ``_filecache`` dict on the object whose method is decorated.
1666 away in a ``_filecache`` dict on the object whose method is decorated.
1667
1667
1668 On subsequent access, the cached result is returned directly, since it has
1668 On subsequent access, the cached result is returned directly, since it has
1669 been stored in the instance dictionary.
1669 been stored in the instance dictionary.
1670
1670
1671 On external property set/delete operations, the caller must update the
1671 On external property set/delete operations, the caller must update the
1672 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1672 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1673 instead of directly setting <attr>.
1673 instead of directly setting <attr>.
1674
1674
1675 When using the property API, the cached data is always used if available.
1675 When using the property API, the cached data is always used if available.
1676 No stat() is performed to check if the file has changed.
1676 No stat() is performed to check if the file has changed.
1677
1677
1678 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1678 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1679 can populate an entry before the property's getter is called. In this case,
1679 can populate an entry before the property's getter is called. In this case,
1680 entries in ``_filecache`` will be used during property operations,
1680 entries in ``_filecache`` will be used during property operations,
1681 if available. If the underlying file changes, it is up to external callers
1681 if available. If the underlying file changes, it is up to external callers
1682 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1682 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1683 method result as well as possibly calling ``del obj._filecache[attr]`` to
1683 method result as well as possibly calling ``del obj._filecache[attr]`` to
1684 remove the ``filecacheentry``.
1684 remove the ``filecacheentry``.
1685 """
1685 """
1686
1686
1687 def __init__(self, *paths):
1687 def __init__(self, *paths):
1688 self.paths = paths
1688 self.paths = paths
1689
1689
1690 def join(self, obj, fname):
1690 def join(self, obj, fname):
1691 """Used to compute the runtime path of a cached file.
1691 """Used to compute the runtime path of a cached file.
1692
1692
1693 Users should subclass filecache and provide their own version of this
1693 Users should subclass filecache and provide their own version of this
1694 function to call the appropriate join function on 'obj' (an instance
1694 function to call the appropriate join function on 'obj' (an instance
1695 of the class whose member function was decorated).
1695 of the class whose member function was decorated).
1696 """
1696 """
1697 raise NotImplementedError
1697 raise NotImplementedError
1698
1698
1699 def __call__(self, func):
1699 def __call__(self, func):
1700 self.func = func
1700 self.func = func
1701 self.sname = func.__name__
1701 self.sname = func.__name__
1702 self.name = pycompat.sysbytes(self.sname)
1702 self.name = pycompat.sysbytes(self.sname)
1703 return self
1703 return self
1704
1704
1705 def __get__(self, obj, type=None):
1705 def __get__(self, obj, type=None):
1706 # if accessed on the class, return the descriptor itself.
1706 # if accessed on the class, return the descriptor itself.
1707 if obj is None:
1707 if obj is None:
1708 return self
1708 return self
1709
1709
1710 assert self.sname not in obj.__dict__
1710 assert self.sname not in obj.__dict__
1711
1711
1712 entry = obj._filecache.get(self.name)
1712 entry = obj._filecache.get(self.name)
1713
1713
1714 if entry:
1714 if entry:
1715 if entry.changed():
1715 if entry.changed():
1716 entry.obj = self.func(obj)
1716 entry.obj = self.func(obj)
1717 else:
1717 else:
1718 paths = [self.join(obj, path) for path in self.paths]
1718 paths = [self.join(obj, path) for path in self.paths]
1719
1719
1720 # We stat -before- creating the object so our cache doesn't lie if
1720 # We stat -before- creating the object so our cache doesn't lie if
1721 # a writer modifies the file between the time we read and the time we stat
1721 # a writer modifies the file between the time we read and the time we stat
1722 entry = filecacheentry(paths, True)
1722 entry = filecacheentry(paths, True)
1723 entry.obj = self.func(obj)
1723 entry.obj = self.func(obj)
1724
1724
1725 obj._filecache[self.name] = entry
1725 obj._filecache[self.name] = entry
1726
1726
1727 obj.__dict__[self.sname] = entry.obj
1727 obj.__dict__[self.sname] = entry.obj
1728 return entry.obj
1728 return entry.obj
1729
1729
1730 # don't implement __set__(), which would make __dict__ lookup as slow as
1730 # don't implement __set__(), which would make __dict__ lookup as slow as
1731 # function call.
1731 # function call.
1732
1732
1733 def set(self, obj, value):
1733 def set(self, obj, value):
1734 if self.name not in obj._filecache:
1734 if self.name not in obj._filecache:
1735 # we add an entry for the missing value because X in __dict__
1735 # we add an entry for the missing value because X in __dict__
1736 # implies X in _filecache
1736 # implies X in _filecache
1737 paths = [self.join(obj, path) for path in self.paths]
1737 paths = [self.join(obj, path) for path in self.paths]
1738 ce = filecacheentry(paths, False)
1738 ce = filecacheentry(paths, False)
1739 obj._filecache[self.name] = ce
1739 obj._filecache[self.name] = ce
1740 else:
1740 else:
1741 ce = obj._filecache[self.name]
1741 ce = obj._filecache[self.name]
1742
1742
1743 ce.obj = value # update cached copy
1743 ce.obj = value # update cached copy
1744 obj.__dict__[self.sname] = value # update copy returned by obj.x
1744 obj.__dict__[self.sname] = value # update copy returned by obj.x
1745
1745
1746
1746
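# Illustrative sketch (not part of scmutil.py): how a consumer of filecache
# is expected to subclass it and provide join(), per the docstring above.
# The vfs-based join() and the decorated property below are hypothetical,
# but mirror how repository classes wire their file-backed cached attributes.
class _vfsfilecache_demo(filecache):
    """filecache variant that resolves cached paths through obj.vfs."""

    def join(self, obj, fname):
        return obj.vfs.join(fname)

# class somerepo(object):
#     @_vfsfilecache_demo(b'bookmarks')
#     def _bookmarks(self):
#         # hypothetical parser; recomputed whenever the tracked file changes
#         return parse_bookmarks_file(self)
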
1747 def extdatasource(repo, source):
1747 def extdatasource(repo, source):
1748 """Gather a map of rev -> value dict from the specified source
1748 """Gather a map of rev -> value dict from the specified source
1749
1749
1750 A source spec is treated as a URL, with a special case shell: type
1750 A source spec is treated as a URL, with a special case shell: type
1751 for parsing the output from a shell command.
1751 for parsing the output from a shell command.
1752
1752
1753 The data is parsed as a series of newline-separated records where
1753 The data is parsed as a series of newline-separated records where
1754 each record is a revision specifier optionally followed by a space
1754 each record is a revision specifier optionally followed by a space
1755 and a freeform string value. If the revision is known locally, it
1755 and a freeform string value. If the revision is known locally, it
1756 is converted to a rev, otherwise the record is skipped.
1756 is converted to a rev, otherwise the record is skipped.
1757
1757
1758 Note that both key and value are treated as UTF-8 and converted to
1758 Note that both key and value are treated as UTF-8 and converted to
1759 the local encoding. This allows uniformity between local and
1759 the local encoding. This allows uniformity between local and
1760 remote data sources.
1760 remote data sources.
1761 """
1761 """
1762
1762
1763 spec = repo.ui.config(b"extdata", source)
1763 spec = repo.ui.config(b"extdata", source)
1764 if not spec:
1764 if not spec:
1765 raise error.Abort(_(b"unknown extdata source '%s'") % source)
1765 raise error.Abort(_(b"unknown extdata source '%s'") % source)
1766
1766
1767 data = {}
1767 data = {}
1768 src = proc = None
1768 src = proc = None
1769 try:
1769 try:
1770 if spec.startswith(b"shell:"):
1770 if spec.startswith(b"shell:"):
1771 # external commands should be run relative to the repo root
1771 # external commands should be run relative to the repo root
1772 cmd = spec[6:]
1772 cmd = spec[6:]
1773 proc = subprocess.Popen(
1773 proc = subprocess.Popen(
1774 procutil.tonativestr(cmd),
1774 procutil.tonativestr(cmd),
1775 shell=True,
1775 shell=True,
1776 bufsize=-1,
1776 bufsize=-1,
1777 close_fds=procutil.closefds,
1777 close_fds=procutil.closefds,
1778 stdout=subprocess.PIPE,
1778 stdout=subprocess.PIPE,
1779 cwd=procutil.tonativestr(repo.root),
1779 cwd=procutil.tonativestr(repo.root),
1780 )
1780 )
1781 src = proc.stdout
1781 src = proc.stdout
1782 else:
1782 else:
1783 # treat as a URL or file
1783 # treat as a URL or file
1784 src = url.open(repo.ui, spec)
1784 src = url.open(repo.ui, spec)
1785 for l in src:
1785 for l in src:
1786 if b" " in l:
1786 if b" " in l:
1787 k, v = l.strip().split(b" ", 1)
1787 k, v = l.strip().split(b" ", 1)
1788 else:
1788 else:
1789 k, v = l.strip(), b""
1789 k, v = l.strip(), b""
1790
1790
1791 k = encoding.tolocal(k)
1791 k = encoding.tolocal(k)
1792 try:
1792 try:
1793 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1793 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1794 except (error.LookupError, error.RepoLookupError):
1794 except (error.LookupError, error.RepoLookupError):
1795 pass # we ignore data for nodes that don't exist locally
1795 pass # we ignore data for nodes that don't exist locally
1796 finally:
1796 finally:
1797 if proc:
1797 if proc:
1798 try:
1798 try:
1799 proc.communicate()
1799 proc.communicate()
1800 except ValueError:
1800 except ValueError:
1801 # This happens if we started iterating src and then
1801 # This happens if we started iterating src and then
1802 # got a parse error on a line. It should be safe to ignore.
1802 # got a parse error on a line. It should be safe to ignore.
1803 pass
1803 pass
1804 if src:
1804 if src:
1805 src.close()
1805 src.close()
1806 if proc and proc.returncode != 0:
1806 if proc and proc.returncode != 0:
1807 raise error.Abort(
1807 raise error.Abort(
1808 _(b"extdata command '%s' failed: %s")
1808 _(b"extdata command '%s' failed: %s")
1809 % (cmd, procutil.explainexit(proc.returncode))
1809 % (cmd, procutil.explainexit(proc.returncode))
1810 )
1810 )
1811
1811
1812 return data
1812 return data
1813
1813
1814
1814
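# Illustrative sketch (not part of scmutil.py): parsing of the
# newline-separated "rev[ value]" records consumed by extdatasource().
# The input bytes are hypothetical; the real code additionally resolves
# each key to a local revision and silently drops unknown ones.
def _parse_extdata_demo(lines):
    data = {}
    for l in lines:
        if b" " in l:
            k, v = l.strip().split(b" ", 1)
        else:
            k, v = l.strip(), b""
        data[k] = v
    return data

# _parse_extdata_demo([b"3ab2f1 reviewed\n", b"9c01d4\n"])
#   -> {b'3ab2f1': b'reviewed', b'9c01d4': b''}
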
1815 class progress(object):
1815 class progress(object):
1816 def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1816 def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1817 self.ui = ui
1817 self.ui = ui
1818 self.pos = 0
1818 self.pos = 0
1819 self.topic = topic
1819 self.topic = topic
1820 self.unit = unit
1820 self.unit = unit
1821 self.total = total
1821 self.total = total
1822 self.debug = ui.configbool(b'progress', b'debug')
1822 self.debug = ui.configbool(b'progress', b'debug')
1823 self._updatebar = updatebar
1823 self._updatebar = updatebar
1824
1824
1825 def __enter__(self):
1825 def __enter__(self):
1826 return self
1826 return self
1827
1827
1828 def __exit__(self, exc_type, exc_value, exc_tb):
1828 def __exit__(self, exc_type, exc_value, exc_tb):
1829 self.complete()
1829 self.complete()
1830
1830
1831 def update(self, pos, item=b"", total=None):
1831 def update(self, pos, item=b"", total=None):
1832 assert pos is not None
1832 assert pos is not None
1833 if total:
1833 if total:
1834 self.total = total
1834 self.total = total
1835 self.pos = pos
1835 self.pos = pos
1836 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1836 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1837 if self.debug:
1837 if self.debug:
1838 self._printdebug(item)
1838 self._printdebug(item)
1839
1839
1840 def increment(self, step=1, item=b"", total=None):
1840 def increment(self, step=1, item=b"", total=None):
1841 self.update(self.pos + step, item, total)
1841 self.update(self.pos + step, item, total)
1842
1842
1843 def complete(self):
1843 def complete(self):
1844 self.pos = None
1844 self.pos = None
1845 self.unit = b""
1845 self.unit = b""
1846 self.total = None
1846 self.total = None
1847 self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
1847 self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
1848
1848
1849 def _printdebug(self, item):
1849 def _printdebug(self, item):
1850 unit = b''
1850 unit = b''
1851 if self.unit:
1851 if self.unit:
1852 unit = b' ' + self.unit
1852 unit = b' ' + self.unit
1853 if item:
1853 if item:
1854 item = b' ' + item
1854 item = b' ' + item
1855
1855
1856 if self.total:
1856 if self.total:
1857 pct = 100.0 * self.pos / self.total
1857 pct = 100.0 * self.pos / self.total
1858 self.ui.debug(
1858 self.ui.debug(
1859 b'%s:%s %d/%d%s (%4.2f%%)\n'
1859 b'%s:%s %d/%d%s (%4.2f%%)\n'
1860 % (self.topic, item, self.pos, self.total, unit, pct)
1860 % (self.topic, item, self.pos, self.total, unit, pct)
1861 )
1861 )
1862 else:
1862 else:
1863 self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1863 self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1864
1864
1865
1865
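# Illustrative sketch (not part of scmutil.py): driving the progress context
# manager above with stand-in ui and update-bar objects. Only configbool()
# and debug() can be reached on the ui object here; everything else is
# hypothetical demo scaffolding.
class _quietui_demo(object):
    def configbool(self, section, name):
        return False  # keeps progress.debug off

    def debug(self, msg):
        pass

def _updatebar_demo(topic, pos, item, unit, total):
    pass  # a real bar would render topic/pos/total here

def _progress_demo():
    with progress(
        _quietui_demo(), _updatebar_demo, b'demo', unit=b'files', total=3
    ) as p:
        for name in (b'a', b'b', b'c'):
            p.increment(item=name)
        # complete() runs automatically when the with-block exits
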
1866 def gdinitconfig(ui):
1866 def gdinitconfig(ui):
1867 """helper function to know if a repo should be created as general delta
1867 """helper function to know if a repo should be created as general delta
1868 """
1868 """
1869 # experimental config: format.generaldelta
1869 # experimental config: format.generaldelta
1870 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1870 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1871 b'format', b'usegeneraldelta'
1871 b'format', b'usegeneraldelta'
1872 )
1872 )
1873
1873
1874
1874
1875 def gddeltaconfig(ui):
1875 def gddeltaconfig(ui):
1876 """helper function to know if incoming delta should be optimised
1876 """helper function to know if incoming delta should be optimised
1877 """
1877 """
1878 # experimental config: format.generaldelta
1878 # experimental config: format.generaldelta
1879 return ui.configbool(b'format', b'generaldelta')
1879 return ui.configbool(b'format', b'generaldelta')
1880
1880
1881
1881
1882 class simplekeyvaluefile(object):
1882 class simplekeyvaluefile(object):
1883 """A simple file with key=value lines
1883 """A simple file with key=value lines
1884
1884
1885 Keys must be alphanumeric and start with a letter; values must not
1885 Keys must be alphanumeric and start with a letter; values must not
1886 contain '\n' characters"""
1886 contain '\n' characters"""
1887
1887
1888 firstlinekey = b'__firstline'
1888 firstlinekey = b'__firstline'
1889
1889
1890 def __init__(self, vfs, path, keys=None):
1890 def __init__(self, vfs, path, keys=None):
1891 self.vfs = vfs
1891 self.vfs = vfs
1892 self.path = path
1892 self.path = path
1893
1893
1894 def read(self, firstlinenonkeyval=False):
1894 def read(self, firstlinenonkeyval=False):
1895 """Read the contents of a simple key-value file
1895 """Read the contents of a simple key-value file
1896
1896
1897 'firstlinenonkeyval' indicates whether the first line of the file should
1897 'firstlinenonkeyval' indicates whether the first line of the file should
1898 be treated as a key-value pair or returned fully under the
1898 be treated as a key-value pair or returned fully under the
1899 __firstline key."""
1899 __firstline key."""
1900 lines = self.vfs.readlines(self.path)
1900 lines = self.vfs.readlines(self.path)
1901 d = {}
1901 d = {}
1902 if firstlinenonkeyval:
1902 if firstlinenonkeyval:
1903 if not lines:
1903 if not lines:
1904 e = _(b"empty simplekeyvalue file")
1904 e = _(b"empty simplekeyvalue file")
1905 raise error.CorruptedState(e)
1905 raise error.CorruptedState(e)
1906 # we don't want to include '\n' in the __firstline
1906 # we don't want to include '\n' in the __firstline
1907 d[self.firstlinekey] = lines[0][:-1]
1907 d[self.firstlinekey] = lines[0][:-1]
1908 del lines[0]
1908 del lines[0]
1909
1909
1910 try:
1910 try:
1911 # the 'if line.strip()' part prevents us from failing on empty
1911 # the 'if line.strip()' part prevents us from failing on empty
1912 # lines which only contain '\n' and therefore are not skipped
1912 # lines which only contain '\n' and therefore are not skipped
1913 # by 'if line'
1913 # by 'if line'
1914 updatedict = dict(
1914 updatedict = dict(
1915 line[:-1].split(b'=', 1) for line in lines if line.strip()
1915 line[:-1].split(b'=', 1) for line in lines if line.strip()
1916 )
1916 )
1917 if self.firstlinekey in updatedict:
1917 if self.firstlinekey in updatedict:
1918 e = _(b"%r can't be used as a key")
1918 e = _(b"%r can't be used as a key")
1919 raise error.CorruptedState(e % self.firstlinekey)
1919 raise error.CorruptedState(e % self.firstlinekey)
1920 d.update(updatedict)
1920 d.update(updatedict)
1921 except ValueError as e:
1921 except ValueError as e:
1922 raise error.CorruptedState(stringutil.forcebytestr(e))
1922 raise error.CorruptedState(stringutil.forcebytestr(e))
1923 return d
1923 return d
1924
1924
1925 def write(self, data, firstline=None):
1925 def write(self, data, firstline=None):
1926 """Write key=>value mapping to a file
1926 """Write key=>value mapping to a file
1927 data is a dict. Keys must be alphanumerical and start with a letter.
1927 data is a dict. Keys must be alphanumerical and start with a letter.
1928 Values must not contain newline characters.
1928 Values must not contain newline characters.
1929
1929
1930 If 'firstline' is not None, it is written to file before
1930 If 'firstline' is not None, it is written to file before
1931 everything else, as it is, not in a key=value form"""
1931 everything else, as it is, not in a key=value form"""
1932 lines = []
1932 lines = []
1933 if firstline is not None:
1933 if firstline is not None:
1934 lines.append(b'%s\n' % firstline)
1934 lines.append(b'%s\n' % firstline)
1935
1935
1936 for k, v in data.items():
1936 for k, v in data.items():
1937 if k == self.firstlinekey:
1937 if k == self.firstlinekey:
1938 e = b"key name '%s' is reserved" % self.firstlinekey
1938 e = b"key name '%s' is reserved" % self.firstlinekey
1939 raise error.ProgrammingError(e)
1939 raise error.ProgrammingError(e)
1940 if not k[0:1].isalpha():
1940 if not k[0:1].isalpha():
1941 e = b"keys must start with a letter in a key-value file"
1941 e = b"keys must start with a letter in a key-value file"
1942 raise error.ProgrammingError(e)
1942 raise error.ProgrammingError(e)
1943 if not k.isalnum():
1943 if not k.isalnum():
1944 e = b"invalid key name in a simple key-value file"
1944 e = b"invalid key name in a simple key-value file"
1945 raise error.ProgrammingError(e)
1945 raise error.ProgrammingError(e)
1946 if b'\n' in v:
1946 if b'\n' in v:
1947 e = b"invalid value in a simple key-value file"
1947 e = b"invalid value in a simple key-value file"
1948 raise error.ProgrammingError(e)
1948 raise error.ProgrammingError(e)
1949 lines.append(b"%s=%s\n" % (k, v))
1949 lines.append(b"%s=%s\n" % (k, v))
1950 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1950 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1951 fp.write(b''.join(lines))
1951 fp.write(b''.join(lines))
1952
1952
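A minimal usage sketch for the class above, assuming a repository `vfs` object; the file name, keys and values are made up for illustration.

    # Hypothetical usage sketch for simplekeyvaluefile.
    kvfile = simplekeyvaluefile(repo.vfs, b'mystate')     # 'mystate' is a made-up name
    kvfile.write({b'step': b'3', b'node': b'abc123'}, firstline=b'v1')
    state = kvfile.read(firstlinenonkeyval=True)
    # state == {b'__firstline': b'v1', b'step': b'3', b'node': b'abc123'}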
1953
1953
1954 _reportobsoletedsource = [
1954 _reportobsoletedsource = [
1955 b'debugobsolete',
1955 b'debugobsolete',
1956 b'pull',
1956 b'pull',
1957 b'push',
1957 b'push',
1958 b'serve',
1958 b'serve',
1959 b'unbundle',
1959 b'unbundle',
1960 ]
1960 ]
1961
1961
1962 _reportnewcssource = [
1962 _reportnewcssource = [
1963 b'pull',
1963 b'pull',
1964 b'unbundle',
1964 b'unbundle',
1965 ]
1965 ]
1966
1966
1967
1967
1968 def prefetchfiles(repo, revmatches):
1968 def prefetchfiles(repo, revmatches):
1969 """Invokes the registered file prefetch functions, allowing extensions to
1969 """Invokes the registered file prefetch functions, allowing extensions to
1970 ensure the corresponding files are available locally, before the command
1970 ensure the corresponding files are available locally, before the command
1971 uses them.
1971 uses them.
1972
1972
1973 Args:
1973 Args:
1974 revmatches: a list of (revision, match) tuples to indicate the files to
1974 revmatches: a list of (revision, match) tuples to indicate the files to
1975 fetch at each revision. If any of the match elements is None, it matches
1975 fetch at each revision. If any of the match elements is None, it matches
1976 all files.
1976 all files.
1977 """
1977 """
1978
1978
1979 def _matcher(m):
1979 def _matcher(m):
1980 if m:
1980 if m:
1981 assert isinstance(m, matchmod.basematcher)
1981 assert isinstance(m, matchmod.basematcher)
1982 # The command itself will complain about files that don't exist, so
1982 # The command itself will complain about files that don't exist, so
1983 # don't duplicate the message.
1983 # don't duplicate the message.
1984 return matchmod.badmatch(m, lambda fn, msg: None)
1984 return matchmod.badmatch(m, lambda fn, msg: None)
1985 else:
1985 else:
1986 return matchall(repo)
1986 return matchall(repo)
1987
1987
1988 revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]
1988 revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]
1989
1989
1990 fileprefetchhooks(repo, revbadmatches)
1990 fileprefetchhooks(repo, revbadmatches)
1991
1991
1992
1992
1993 # a list of (repo, revmatches) prefetch functions
1993 # a list of (repo, revmatches) prefetch functions
1994 fileprefetchhooks = util.hooks()
1994 fileprefetchhooks = util.hooks()
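An extension would typically hook into this roughly as follows. The extension name and callback body are hypothetical; the only things assumed are util.hooks.add() and the (repo, revmatches) calling convention used by prefetchfiles() above.

    # Hypothetical sketch of an extension registering a prefetch callback.
    def _myprefetch(repo, revmatches):
        for rev, match in revmatches:
            pass  # fetch the files selected by 'match' at 'rev' from remote storage

    fileprefetchhooks.add(b'myext', _myprefetch)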
1995
1995
1996 # A marker that tells the evolve extension to suppress its own reporting
1996 # A marker that tells the evolve extension to suppress its own reporting
1997 _reportstroubledchangesets = True
1997 _reportstroubledchangesets = True
1998
1998
1999
1999
2000 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
2000 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
2001 """register a callback to issue a summary after the transaction is closed
2001 """register a callback to issue a summary after the transaction is closed
2002
2002
2003 If as_validator is true, then the callbacks are registered as transaction
2003 If as_validator is true, then the callbacks are registered as transaction
2004 validators instead
2004 validators instead
2005 """
2005 """
2006
2006
2007 def txmatch(sources):
2007 def txmatch(sources):
2008 return any(txnname.startswith(source) for source in sources)
2008 return any(txnname.startswith(source) for source in sources)
2009
2009
2010 categories = []
2010 categories = []
2011
2011
2012 def reportsummary(func):
2012 def reportsummary(func):
2013 """decorator for report callbacks."""
2013 """decorator for report callbacks."""
2014 # The repoview life cycle is shorter than that of the actual
2014 # The repoview life cycle is shorter than that of the actual
2015 # underlying repository. So the filtered object can die before the
2015 # underlying repository. So the filtered object can die before the
2016 # weakref is used, leading to trouble. We keep a reference to the
2016 # weakref is used, leading to trouble. We keep a reference to the
2017 # unfiltered object and restore the filtering when retrieving the
2017 # unfiltered object and restore the filtering when retrieving the
2018 # repository through the weakref.
2018 # repository through the weakref.
2019 filtername = repo.filtername
2019 filtername = repo.filtername
2020 reporef = weakref.ref(repo.unfiltered())
2020 reporef = weakref.ref(repo.unfiltered())
2021
2021
2022 def wrapped(tr):
2022 def wrapped(tr):
2023 repo = reporef()
2023 repo = reporef()
2024 if filtername:
2024 if filtername:
2025 assert repo is not None # help pytype
2025 assert repo is not None # help pytype
2026 repo = repo.filtered(filtername)
2026 repo = repo.filtered(filtername)
2027 func(repo, tr)
2027 func(repo, tr)
2028
2028
2029 newcat = b'%02i-txnreport' % len(categories)
2029 newcat = b'%02i-txnreport' % len(categories)
2030 if as_validator:
2030 if as_validator:
2031 otr.addvalidator(newcat, wrapped)
2031 otr.addvalidator(newcat, wrapped)
2032 else:
2032 else:
2033 otr.addpostclose(newcat, wrapped)
2033 otr.addpostclose(newcat, wrapped)
2034 categories.append(newcat)
2034 categories.append(newcat)
2035 return wrapped
2035 return wrapped
2036
2036
2037 @reportsummary
2037 @reportsummary
2038 def reportchangegroup(repo, tr):
2038 def reportchangegroup(repo, tr):
2039 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2039 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2040 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2040 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2041 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2041 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2042 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2042 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2043 if cgchangesets or cgrevisions or cgfiles:
2043 if cgchangesets or cgrevisions or cgfiles:
2044 htext = b""
2044 htext = b""
2045 if cgheads:
2045 if cgheads:
2046 htext = _(b" (%+d heads)") % cgheads
2046 htext = _(b" (%+d heads)") % cgheads
2047 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2047 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2048 if as_validator:
2048 if as_validator:
2049 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2049 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2050 assert repo is not None # help pytype
2050 assert repo is not None # help pytype
2051 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2051 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2052
2052
2053 if txmatch(_reportobsoletedsource):
2053 if txmatch(_reportobsoletedsource):
2054
2054
2055 @reportsummary
2055 @reportsummary
2056 def reportobsoleted(repo, tr):
2056 def reportobsoleted(repo, tr):
2057 obsoleted = obsutil.getobsoleted(repo, tr)
2057 obsoleted = obsutil.getobsoleted(repo, tr)
2058 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2058 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2059 if newmarkers:
2059 if newmarkers:
2060 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2060 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2061 if obsoleted:
2061 if obsoleted:
2062 msg = _(b'obsoleted %i changesets\n')
2062 msg = _(b'obsoleted %i changesets\n')
2063 if as_validator:
2063 if as_validator:
2064 msg = _(b'obsoleting %i changesets\n')
2064 msg = _(b'obsoleting %i changesets\n')
2065 repo.ui.status(msg % len(obsoleted))
2065 repo.ui.status(msg % len(obsoleted))
2066
2066
2067 if obsolete.isenabled(
2067 if obsolete.isenabled(
2068 repo, obsolete.createmarkersopt
2068 repo, obsolete.createmarkersopt
2069 ) and repo.ui.configbool(
2069 ) and repo.ui.configbool(
2070 b'experimental', b'evolution.report-instabilities'
2070 b'experimental', b'evolution.report-instabilities'
2071 ):
2071 ):
2072 instabilitytypes = [
2072 instabilitytypes = [
2073 (b'orphan', b'orphan'),
2073 (b'orphan', b'orphan'),
2074 (b'phase-divergent', b'phasedivergent'),
2074 (b'phase-divergent', b'phasedivergent'),
2075 (b'content-divergent', b'contentdivergent'),
2075 (b'content-divergent', b'contentdivergent'),
2076 ]
2076 ]
2077
2077
2078 def getinstabilitycounts(repo):
2078 def getinstabilitycounts(repo):
2079 filtered = repo.changelog.filteredrevs
2079 filtered = repo.changelog.filteredrevs
2080 counts = {}
2080 counts = {}
2081 for instability, revset in instabilitytypes:
2081 for instability, revset in instabilitytypes:
2082 counts[instability] = len(
2082 counts[instability] = len(
2083 set(obsolete.getrevs(repo, revset)) - filtered
2083 set(obsolete.getrevs(repo, revset)) - filtered
2084 )
2084 )
2085 return counts
2085 return counts
2086
2086
2087 oldinstabilitycounts = getinstabilitycounts(repo)
2087 oldinstabilitycounts = getinstabilitycounts(repo)
2088
2088
2089 @reportsummary
2089 @reportsummary
2090 def reportnewinstabilities(repo, tr):
2090 def reportnewinstabilities(repo, tr):
2091 newinstabilitycounts = getinstabilitycounts(repo)
2091 newinstabilitycounts = getinstabilitycounts(repo)
2092 for instability, revset in instabilitytypes:
2092 for instability, revset in instabilitytypes:
2093 delta = (
2093 delta = (
2094 newinstabilitycounts[instability]
2094 newinstabilitycounts[instability]
2095 - oldinstabilitycounts[instability]
2095 - oldinstabilitycounts[instability]
2096 )
2096 )
2097 msg = getinstabilitymessage(delta, instability)
2097 msg = getinstabilitymessage(delta, instability)
2098 if msg:
2098 if msg:
2099 repo.ui.warn(msg)
2099 repo.ui.warn(msg)
2100
2100
2101 if txmatch(_reportnewcssource):
2101 if txmatch(_reportnewcssource):
2102
2102
2103 @reportsummary
2103 @reportsummary
2104 def reportnewcs(repo, tr):
2104 def reportnewcs(repo, tr):
2105 """Report the range of new revisions pulled/unbundled."""
2105 """Report the range of new revisions pulled/unbundled."""
2106 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2106 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2107 unfi = repo.unfiltered()
2107 unfi = repo.unfiltered()
2108 if origrepolen >= len(unfi):
2108 if origrepolen >= len(unfi):
2109 return
2109 return
2110
2110
2111 # Compute the bounds of new visible revisions' range.
2111 # Compute the bounds of new visible revisions' range.
2112 revs = smartset.spanset(repo, start=origrepolen)
2112 revs = smartset.spanset(repo, start=origrepolen)
2113 if revs:
2113 if revs:
2114 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2114 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2115
2115
2116 if minrev == maxrev:
2116 if minrev == maxrev:
2117 revrange = minrev
2117 revrange = minrev
2118 else:
2118 else:
2119 revrange = b'%s:%s' % (minrev, maxrev)
2119 revrange = b'%s:%s' % (minrev, maxrev)
2120 draft = len(repo.revs(b'%ld and draft()', revs))
2120 draft = len(repo.revs(b'%ld and draft()', revs))
2121 secret = len(repo.revs(b'%ld and secret()', revs))
2121 secret = len(repo.revs(b'%ld and secret()', revs))
2122 if not (draft or secret):
2122 if not (draft or secret):
2123 msg = _(b'new changesets %s\n') % revrange
2123 msg = _(b'new changesets %s\n') % revrange
2124 elif draft and secret:
2124 elif draft and secret:
2125 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2125 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2126 msg %= (revrange, draft, secret)
2126 msg %= (revrange, draft, secret)
2127 elif draft:
2127 elif draft:
2128 msg = _(b'new changesets %s (%d drafts)\n')
2128 msg = _(b'new changesets %s (%d drafts)\n')
2129 msg %= (revrange, draft)
2129 msg %= (revrange, draft)
2130 elif secret:
2130 elif secret:
2131 msg = _(b'new changesets %s (%d secrets)\n')
2131 msg = _(b'new changesets %s (%d secrets)\n')
2132 msg %= (revrange, secret)
2132 msg %= (revrange, secret)
2133 else:
2133 else:
2134 errormsg = b'entered unreachable condition'
2134 errormsg = b'entered unreachable condition'
2135 raise error.ProgrammingError(errormsg)
2135 raise error.ProgrammingError(errormsg)
2136 repo.ui.status(msg)
2136 repo.ui.status(msg)
2137
2137
2138 # search new changesets directly pulled as obsolete
2138 # search new changesets directly pulled as obsolete
2139 duplicates = tr.changes.get(b'revduplicates', ())
2139 duplicates = tr.changes.get(b'revduplicates', ())
2140 obsadded = unfi.revs(
2140 obsadded = unfi.revs(
2141 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2141 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2142 )
2142 )
2143 cl = repo.changelog
2143 cl = repo.changelog
2144 extinctadded = [r for r in obsadded if r not in cl]
2144 extinctadded = [r for r in obsadded if r not in cl]
2145 if extinctadded:
2145 if extinctadded:
2146 # They are not just obsolete, but obsolete and invisible
2146 # They are not just obsolete, but obsolete and invisible
2147 # we call them "extinct" internally but the term has not been
2147 # we call them "extinct" internally but the term has not been
2148 # exposed to users.
2148 # exposed to users.
2149 msg = b'(%d other changesets obsolete on arrival)\n'
2149 msg = b'(%d other changesets obsolete on arrival)\n'
2150 repo.ui.status(msg % len(extinctadded))
2150 repo.ui.status(msg % len(extinctadded))
2151
2151
2152 @reportsummary
2152 @reportsummary
2153 def reportphasechanges(repo, tr):
2153 def reportphasechanges(repo, tr):
2154 """Report statistics of phase changes for changesets pre-existing
2154 """Report statistics of phase changes for changesets pre-existing
2155 pull/unbundle.
2155 pull/unbundle.
2156 """
2156 """
2157 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2157 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2158 published = []
2158 published = []
2159 for revs, (old, new) in tr.changes.get(b'phases', []):
2159 for revs, (old, new) in tr.changes.get(b'phases', []):
2160 if new != phases.public:
2160 if new != phases.public:
2161 continue
2161 continue
2162 published.extend(rev for rev in revs if rev < origrepolen)
2162 published.extend(rev for rev in revs if rev < origrepolen)
2163 if not published:
2163 if not published:
2164 return
2164 return
2165 msg = _(b'%d local changesets published\n')
2165 msg = _(b'%d local changesets published\n')
2166 if as_validator:
2166 if as_validator:
2167 msg = _(b'%d local changesets will be published\n')
2167 msg = _(b'%d local changesets will be published\n')
2168 repo.ui.status(msg % len(published))
2168 repo.ui.status(msg % len(published))
2169
2169
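A hedged sketch of how the function above is wired up around a transaction; the transaction name and surrounding code are illustrative, not the actual call sites.

    # Hypothetical sketch: attach the summary callbacks to a transaction.
    with repo.transaction(b'pull') as tr:
        registersummarycallback(repo, tr, txnname=b'pull')
        # ... apply the incoming changegroup; the summary is printed when the
        # transaction closes (or checked first if as_validator=True)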
2170
2170
2171 def getinstabilitymessage(delta, instability):
2171 def getinstabilitymessage(delta, instability):
2172 """function to return the message to show warning about new instabilities
2172 """function to return the message to show warning about new instabilities
2173
2173
2174 exists as a separate function so that extension can wrap to show more
2174 exists as a separate function so that extension can wrap to show more
2175 information like how to fix instabilities"""
2175 information like how to fix instabilities"""
2176 if delta > 0:
2176 if delta > 0:
2177 return _(b'%i new %s changesets\n') % (delta, instability)
2177 return _(b'%i new %s changesets\n') % (delta, instability)
2178
2178
2179
2179
2180 def nodesummaries(repo, nodes, maxnumnodes=4):
2180 def nodesummaries(repo, nodes, maxnumnodes=4):
2181 if len(nodes) <= maxnumnodes or repo.ui.verbose:
2181 if len(nodes) <= maxnumnodes or repo.ui.verbose:
2182 return b' '.join(short(h) for h in nodes)
2182 return b' '.join(short(h) for h in nodes)
2183 first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2183 first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2184 return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2184 return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2185
2185
2186
2186
2187 def enforcesinglehead(repo, tr, desc, accountclosed=False):
2187 def enforcesinglehead(repo, tr, desc, accountclosed=False):
2188 """check that no named branch has multiple heads"""
2188 """check that no named branch has multiple heads"""
2189 if desc in (b'strip', b'repair'):
2189 if desc in (b'strip', b'repair'):
2190 # skip the logic during strip
2190 # skip the logic during strip
2191 return
2191 return
2192 visible = repo.filtered(b'visible')
2192 visible = repo.filtered(b'visible')
2193 # possible improvement: we could restrict the check to affected branch
2193 # possible improvement: we could restrict the check to affected branch
2194 bm = visible.branchmap()
2194 bm = visible.branchmap()
2195 for name in bm:
2195 for name in bm:
2196 heads = bm.branchheads(name, closed=accountclosed)
2196 heads = bm.branchheads(name, closed=accountclosed)
2197 if len(heads) > 1:
2197 if len(heads) > 1:
2198 msg = _(b'rejecting multiple heads on branch "%s"')
2198 msg = _(b'rejecting multiple heads on branch "%s"')
2199 msg %= name
2199 msg %= name
2200 hint = _(b'%d heads: %s')
2200 hint = _(b'%d heads: %s')
2201 hint %= (len(heads), nodesummaries(repo, heads))
2201 hint %= (len(heads), nodesummaries(repo, heads))
2202 raise error.Abort(msg, hint=hint)
2202 raise error.Abort(msg, hint=hint)
2203
2203
2204
2204
2205 def wrapconvertsink(sink):
2205 def wrapconvertsink(sink):
2206 """Allow extensions to wrap the sink returned by convcmd.convertsink()
2206 """Allow extensions to wrap the sink returned by convcmd.convertsink()
2207 before it is used, whether or not the convert extension was formally loaded.
2207 before it is used, whether or not the convert extension was formally loaded.
2208 """
2208 """
2209 return sink
2209 return sink
2210
2210
2211
2211
2212 def unhidehashlikerevs(repo, specs, hiddentype):
2212 def unhidehashlikerevs(repo, specs, hiddentype):
2213 """parse the user specs and unhide changesets whose hash or revision number
2213 """parse the user specs and unhide changesets whose hash or revision number
2214 is passed.
2214 is passed.
2215
2215
2216 hiddentype can be: 1) 'warn': warn while unhiding changesets
2216 hiddentype can be: 1) 'warn': warn while unhiding changesets
2217 2) 'nowarn': don't warn while unhiding changesets
2217 2) 'nowarn': don't warn while unhiding changesets
2218
2218
2219 returns a repo object with the required changesets unhidden
2219 returns a repo object with the required changesets unhidden
2220 """
2220 """
2221 if not repo.filtername or not repo.ui.configbool(
2221 if not repo.filtername or not repo.ui.configbool(
2222 b'experimental', b'directaccess'
2222 b'experimental', b'directaccess'
2223 ):
2223 ):
2224 return repo
2224 return repo
2225
2225
2226 if repo.filtername not in (b'visible', b'visible-hidden'):
2226 if repo.filtername not in (b'visible', b'visible-hidden'):
2227 return repo
2227 return repo
2228
2228
2229 symbols = set()
2229 symbols = set()
2230 for spec in specs:
2230 for spec in specs:
2231 try:
2231 try:
2232 tree = revsetlang.parse(spec)
2232 tree = revsetlang.parse(spec)
2233 except error.ParseError: # will be reported by scmutil.revrange()
2233 except error.ParseError: # will be reported by scmutil.revrange()
2234 continue
2234 continue
2235
2235
2236 symbols.update(revsetlang.gethashlikesymbols(tree))
2236 symbols.update(revsetlang.gethashlikesymbols(tree))
2237
2237
2238 if not symbols:
2238 if not symbols:
2239 return repo
2239 return repo
2240
2240
2241 revs = _getrevsfromsymbols(repo, symbols)
2241 revs = _getrevsfromsymbols(repo, symbols)
2242
2242
2243 if not revs:
2243 if not revs:
2244 return repo
2244 return repo
2245
2245
2246 if hiddentype == b'warn':
2246 if hiddentype == b'warn':
2247 unfi = repo.unfiltered()
2247 unfi = repo.unfiltered()
2248 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2248 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2249 repo.ui.warn(
2249 repo.ui.warn(
2250 _(
2250 _(
2251 b"warning: accessing hidden changesets for write "
2251 b"warning: accessing hidden changesets for write "
2252 b"operation: %s\n"
2252 b"operation: %s\n"
2253 )
2253 )
2254 % revstr
2254 % revstr
2255 )
2255 )
2256
2256
2257 # we have to use a new filtername to separate branch/tags caches until we can
2257 # we have to use a new filtername to separate branch/tags caches until we can
2258 # disable these caches when revisions are dynamically pinned.
2258 # disable these caches when revisions are dynamically pinned.
2259 return repo.filtered(b'visible-hidden', revs)
2259 return repo.filtered(b'visible-hidden', revs)
2260
2260
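A minimal sketch of the intended use of unhidehashlikerevs(), assuming a hypothetical hash prefix supplied by the user on the command line.

    # Hypothetical sketch: allow a write command to address a hidden changeset.
    repo = unhidehashlikerevs(repo, [b'1a2b3c4d5e6f'], b'warn')
    # 'repo' is now a 'visible-hidden' repoview with that changeset pinned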
2261
2261
2262 def _getrevsfromsymbols(repo, symbols):
2262 def _getrevsfromsymbols(repo, symbols):
2263 """parse the list of symbols and returns a set of revision numbers of hidden
2263 """parse the list of symbols and returns a set of revision numbers of hidden
2264 changesets present in symbols"""
2264 changesets present in symbols"""
2265 revs = set()
2265 revs = set()
2266 unfi = repo.unfiltered()
2266 unfi = repo.unfiltered()
2267 unficl = unfi.changelog
2267 unficl = unfi.changelog
2268 cl = repo.changelog
2268 cl = repo.changelog
2269 tiprev = len(unficl)
2269 tiprev = len(unficl)
2270 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2270 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2271 for s in symbols:
2271 for s in symbols:
2272 try:
2272 try:
2273 n = int(s)
2273 n = int(s)
2274 if n <= tiprev:
2274 if n <= tiprev:
2275 if not allowrevnums:
2275 if not allowrevnums:
2276 continue
2276 continue
2277 else:
2277 else:
2278 if n not in cl:
2278 if n not in cl:
2279 revs.add(n)
2279 revs.add(n)
2280 continue
2280 continue
2281 except ValueError:
2281 except ValueError:
2282 pass
2282 pass
2283
2283
2284 try:
2284 try:
2285 s = resolvehexnodeidprefix(unfi, s)
2285 s = resolvehexnodeidprefix(unfi, s)
2286 except (error.LookupError, error.WdirUnsupported):
2286 except (error.LookupError, error.WdirUnsupported):
2287 s = None
2287 s = None
2288
2288
2289 if s is not None:
2289 if s is not None:
2290 rev = unficl.rev(s)
2290 rev = unficl.rev(s)
2291 if rev not in cl:
2291 if rev not in cl:
2292 revs.add(rev)
2292 revs.add(rev)
2293
2293
2294 return revs
2294 return revs
2295
2295
2296
2296
2297 def bookmarkrevs(repo, mark):
2297 def bookmarkrevs(repo, mark):
2298 """
2298 """
2299 Select revisions reachable by a given bookmark
2299 Select revisions reachable by a given bookmark
2300 """
2300 """
2301 return repo.revs(
2301 return repo.revs(
2302 b"ancestors(bookmark(%s)) - "
2302 b"ancestors(bookmark(%s)) - "
2303 b"ancestors(head() and not bookmark(%s)) - "
2303 b"ancestors(head() and not bookmark(%s)) - "
2304 b"ancestors(bookmark() and not bookmark(%s))",
2304 b"ancestors(bookmark() and not bookmark(%s))",
2305 mark,
2305 mark,
2306 mark,
2306 mark,
2307 mark,
2307 mark,
2308 )
2308 )
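A short usage sketch for bookmarkrevs(); the bookmark name is made up.

    # Hypothetical sketch: iterate over the revisions selected by a bookmark.
    for rev in bookmarkrevs(repo, b'feature-x'):
        repo.ui.status(b'%d\n' % rev)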
@@ -1,332 +1,332 b''
1 $ hg init a
1 $ hg init a
2 $ cd a
2 $ cd a
3 $ echo a > a
3 $ echo a > a
4 $ hg add -n
4 $ hg add -n
5 adding a
5 adding a
6 $ hg st
6 $ hg st
7 ? a
7 ? a
8 $ hg add
8 $ hg add
9 adding a
9 adding a
10 $ hg st
10 $ hg st
11 A a
11 A a
12 $ hg forget a
12 $ hg forget a
13 $ hg add
13 $ hg add
14 adding a
14 adding a
15 $ hg forget a
15 $ hg forget a
16 $ hg add --color debug
16 $ hg add --color debug
17 [ui.addremove.added ui.status|adding a]
17 [ui.addremove.added ui.status|adding a]
18 $ hg st
18 $ hg st
19 A a
19 A a
20 $ mkdir dir
20 $ mkdir dir
21 $ cd dir
21 $ cd dir
22 $ hg add ../a
22 $ hg add ../a
23 ../a already tracked!
23 ../a already tracked!
24 $ cd ..
24 $ cd ..
25
25
26 $ echo b > b
26 $ echo b > b
27 $ hg add -n b
27 $ hg add -n b
28 $ hg st
28 $ hg st
29 A a
29 A a
30 ? b
30 ? b
31 $ hg add b
31 $ hg add b
32 $ hg st
32 $ hg st
33 A a
33 A a
34 A b
34 A b
35
35
36 should fail
36 should fail
37
37
38 $ hg add b
38 $ hg add b
39 b already tracked!
39 b already tracked!
40 $ hg st
40 $ hg st
41 A a
41 A a
42 A b
42 A b
43
43
44 #if no-windows
44 #if no-windows
45 $ echo foo > con.xml
45 $ echo foo > con.xml
46 $ hg --config ui.portablefilenames=jump add con.xml
46 $ hg --config ui.portablefilenames=jump add con.xml
47 abort: ui.portablefilenames value is invalid ('jump')
47 abort: ui.portablefilenames value is invalid ('jump')
48 [30]
48 [30]
49 $ hg --config ui.portablefilenames=abort add con.xml
49 $ hg --config ui.portablefilenames=abort add con.xml
50 abort: filename contains 'con', which is reserved on Windows: con.xml
50 abort: filename contains 'con', which is reserved on Windows: con.xml
51 [255]
51 [10]
52 $ hg st
52 $ hg st
53 A a
53 A a
54 A b
54 A b
55 ? con.xml
55 ? con.xml
56 $ hg add con.xml
56 $ hg add con.xml
57 warning: filename contains 'con', which is reserved on Windows: con.xml
57 warning: filename contains 'con', which is reserved on Windows: con.xml
58 $ hg st
58 $ hg st
59 A a
59 A a
60 A b
60 A b
61 A con.xml
61 A con.xml
62 $ hg forget con.xml
62 $ hg forget con.xml
63 $ rm con.xml
63 $ rm con.xml
64 #endif
64 #endif
65
65
66 #if eol-in-paths
66 #if eol-in-paths
67 $ echo bla > 'hello:world'
67 $ echo bla > 'hello:world'
68 $ hg --config ui.portablefilenames=abort add
68 $ hg --config ui.portablefilenames=abort add
69 adding hello:world
69 adding hello:world
70 abort: filename contains ':', which is reserved on Windows: 'hello:world'
70 abort: filename contains ':', which is reserved on Windows: 'hello:world'
71 [255]
71 [10]
72 $ hg st
72 $ hg st
73 A a
73 A a
74 A b
74 A b
75 ? hello:world
75 ? hello:world
76 $ hg --config ui.portablefilenames=ignore add
76 $ hg --config ui.portablefilenames=ignore add
77 adding hello:world
77 adding hello:world
78 $ hg st
78 $ hg st
79 A a
79 A a
80 A b
80 A b
81 A hello:world
81 A hello:world
82 #endif
82 #endif
83
83
84 $ hg ci -m 0 --traceback
84 $ hg ci -m 0 --traceback
85
85
86 $ hg log -r "heads(. or wdir() & file('**'))"
86 $ hg log -r "heads(. or wdir() & file('**'))"
87 changeset: 0:* (glob)
87 changeset: 0:* (glob)
88 tag: tip
88 tag: tip
89 user: test
89 user: test
90 date: Thu Jan 01 00:00:00 1970 +0000
90 date: Thu Jan 01 00:00:00 1970 +0000
91 summary: 0
91 summary: 0
92
92
93 should fail
93 should fail
94
94
95 $ hg add a
95 $ hg add a
96 a already tracked!
96 a already tracked!
97
97
98 $ echo aa > a
98 $ echo aa > a
99 $ hg ci -m 1
99 $ hg ci -m 1
100 $ hg up 0
100 $ hg up 0
101 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 $ echo aaa > a
102 $ echo aaa > a
103 $ hg ci -m 2
103 $ hg ci -m 2
104 created new head
104 created new head
105
105
106 $ hg merge
106 $ hg merge
107 merging a
107 merging a
108 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
108 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
109 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
109 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
110 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
110 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
111 [1]
111 [1]
112 $ hg st
112 $ hg st
113 M a
113 M a
114 ? a.orig
114 ? a.orig
115
115
116 wdir doesn't cause a crash, and can be dynamically selected if dirty
116 wdir doesn't cause a crash, and can be dynamically selected if dirty
117
117
118 $ hg log -r "heads(. or wdir() & file('**'))"
118 $ hg log -r "heads(. or wdir() & file('**'))"
119 changeset: 2147483647:ffffffffffff
119 changeset: 2147483647:ffffffffffff
120 parent: 2:* (glob)
120 parent: 2:* (glob)
121 parent: 1:* (glob)
121 parent: 1:* (glob)
122 user: test
122 user: test
123 date: * (glob)
123 date: * (glob)
124
124
125 should fail
125 should fail
126
126
127 $ hg add a
127 $ hg add a
128 a already tracked!
128 a already tracked!
129 $ hg st
129 $ hg st
130 M a
130 M a
131 ? a.orig
131 ? a.orig
132 $ hg resolve -m a
132 $ hg resolve -m a
133 (no more unresolved files)
133 (no more unresolved files)
134 $ hg ci -m merge
134 $ hg ci -m merge
135
135
136 Issue683: peculiarity with hg revert of a removed then added file
136 Issue683: peculiarity with hg revert of a removed then added file
137
137
138 $ hg forget a
138 $ hg forget a
139 $ hg add a
139 $ hg add a
140 $ hg st
140 $ hg st
141 ? a.orig
141 ? a.orig
142 $ hg rm a
142 $ hg rm a
143 $ hg st
143 $ hg st
144 R a
144 R a
145 ? a.orig
145 ? a.orig
146 $ echo a > a
146 $ echo a > a
147 $ hg add a
147 $ hg add a
148 $ hg st
148 $ hg st
149 M a
149 M a
150 ? a.orig
150 ? a.orig
151
151
152 excluded file shouldn't be added even if it is explicitly specified
152 excluded file shouldn't be added even if it is explicitly specified
153
153
154 $ hg add a.orig -X '*.orig'
154 $ hg add a.orig -X '*.orig'
155 $ hg st
155 $ hg st
156 M a
156 M a
157 ? a.orig
157 ? a.orig
158
158
159 Forgotten file can be added back (as either clean or modified)
159 Forgotten file can be added back (as either clean or modified)
160
160
161 $ hg forget b
161 $ hg forget b
162 $ hg add b
162 $ hg add b
163 $ hg st -A b
163 $ hg st -A b
164 C b
164 C b
165 $ hg forget b
165 $ hg forget b
166 $ echo modified > b
166 $ echo modified > b
167 $ hg add b
167 $ hg add b
168 $ hg st -A b
168 $ hg st -A b
169 M b
169 M b
170 $ hg revert -qC b
170 $ hg revert -qC b
171
171
172 $ hg add c && echo "unexpected addition of missing file"
172 $ hg add c && echo "unexpected addition of missing file"
173 c: * (glob)
173 c: * (glob)
174 [1]
174 [1]
175 $ echo c > c
175 $ echo c > c
176 $ hg add d c && echo "unexpected addition of missing file"
176 $ hg add d c && echo "unexpected addition of missing file"
177 d: * (glob)
177 d: * (glob)
178 [1]
178 [1]
179 $ hg st
179 $ hg st
180 M a
180 M a
181 A c
181 A c
182 ? a.orig
182 ? a.orig
183 $ hg up -C
183 $ hg up -C
184 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
184 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
185
185
186 forget and get should have the right order: an added but missing dir should be
186 forget and get should have the right order: an added but missing dir should be
187 forgotten before a file with the same name is added
187 forgotten before a file with the same name is added
188
188
189 $ echo file d > d
189 $ echo file d > d
190 $ hg add d
190 $ hg add d
191 $ hg ci -md
191 $ hg ci -md
192 $ hg rm d
192 $ hg rm d
193 $ mkdir d
193 $ mkdir d
194 $ echo a > d/a
194 $ echo a > d/a
195 $ hg add d/a
195 $ hg add d/a
196 $ rm -r d
196 $ rm -r d
197 $ hg up -C
197 $ hg up -C
198 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
198 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
199 $ cat d
199 $ cat d
200 file d
200 file d
201
201
202 Test that adding a directory doesn't require case matching (issue4578)
202 Test that adding a directory doesn't require case matching (issue4578)
203 #if icasefs
203 #if icasefs
204 $ mkdir -p CapsDir1/CapsDir
204 $ mkdir -p CapsDir1/CapsDir
205 $ echo abc > CapsDir1/CapsDir/AbC.txt
205 $ echo abc > CapsDir1/CapsDir/AbC.txt
206 $ mkdir CapsDir1/CapsDir/SubDir
206 $ mkdir CapsDir1/CapsDir/SubDir
207 $ echo def > CapsDir1/CapsDir/SubDir/Def.txt
207 $ echo def > CapsDir1/CapsDir/SubDir/Def.txt
208
208
209 $ hg add capsdir1/capsdir
209 $ hg add capsdir1/capsdir
210 adding CapsDir1/CapsDir/AbC.txt
210 adding CapsDir1/CapsDir/AbC.txt
211 adding CapsDir1/CapsDir/SubDir/Def.txt
211 adding CapsDir1/CapsDir/SubDir/Def.txt
212
212
213 $ hg forget capsdir1/capsdir/abc.txt
213 $ hg forget capsdir1/capsdir/abc.txt
214
214
215 $ hg forget capsdir1/capsdir
215 $ hg forget capsdir1/capsdir
216 removing CapsDir1/CapsDir/SubDir/Def.txt
216 removing CapsDir1/CapsDir/SubDir/Def.txt
217
217
218 $ hg add capsdir1
218 $ hg add capsdir1
219 adding CapsDir1/CapsDir/AbC.txt
219 adding CapsDir1/CapsDir/AbC.txt
220 adding CapsDir1/CapsDir/SubDir/Def.txt
220 adding CapsDir1/CapsDir/SubDir/Def.txt
221
221
222 $ hg ci -m "AbCDef" capsdir1/capsdir
222 $ hg ci -m "AbCDef" capsdir1/capsdir
223
223
224 $ hg status -A capsdir1/capsdir
224 $ hg status -A capsdir1/capsdir
225 C CapsDir1/CapsDir/AbC.txt
225 C CapsDir1/CapsDir/AbC.txt
226 C CapsDir1/CapsDir/SubDir/Def.txt
226 C CapsDir1/CapsDir/SubDir/Def.txt
227
227
228 $ hg files capsdir1/capsdir
228 $ hg files capsdir1/capsdir
229 CapsDir1/CapsDir/AbC.txt
229 CapsDir1/CapsDir/AbC.txt
230 CapsDir1/CapsDir/SubDir/Def.txt
230 CapsDir1/CapsDir/SubDir/Def.txt
231
231
232 $ echo xyz > CapsDir1/CapsDir/SubDir/Def.txt
232 $ echo xyz > CapsDir1/CapsDir/SubDir/Def.txt
233 $ hg ci -m xyz capsdir1/capsdir/subdir/def.txt
233 $ hg ci -m xyz capsdir1/capsdir/subdir/def.txt
234
234
235 $ hg revert -r '.^' capsdir1/capsdir
235 $ hg revert -r '.^' capsdir1/capsdir
236 reverting CapsDir1/CapsDir/SubDir/Def.txt
236 reverting CapsDir1/CapsDir/SubDir/Def.txt
237
237
238 The conditional tests above mean the hash on the diff line differs on Windows
238 The conditional tests above mean the hash on the diff line differs on Windows
239 and OS X
239 and OS X
240 $ hg diff capsdir1/capsdir
240 $ hg diff capsdir1/capsdir
241 diff -r * CapsDir1/CapsDir/SubDir/Def.txt (glob)
241 diff -r * CapsDir1/CapsDir/SubDir/Def.txt (glob)
242 --- a/CapsDir1/CapsDir/SubDir/Def.txt Thu Jan 01 00:00:00 1970 +0000
242 --- a/CapsDir1/CapsDir/SubDir/Def.txt Thu Jan 01 00:00:00 1970 +0000
243 +++ b/CapsDir1/CapsDir/SubDir/Def.txt * (glob)
243 +++ b/CapsDir1/CapsDir/SubDir/Def.txt * (glob)
244 @@ -1,1 +1,1 @@
244 @@ -1,1 +1,1 @@
245 -xyz
245 -xyz
246 +def
246 +def
247
247
248 $ hg mv CapsDir1/CapsDir/abc.txt CapsDir1/CapsDir/ABC.txt
248 $ hg mv CapsDir1/CapsDir/abc.txt CapsDir1/CapsDir/ABC.txt
249 $ hg ci -m "case changing rename" CapsDir1/CapsDir/AbC.txt CapsDir1/CapsDir/ABC.txt
249 $ hg ci -m "case changing rename" CapsDir1/CapsDir/AbC.txt CapsDir1/CapsDir/ABC.txt
250
250
251 $ hg status -A capsdir1/capsdir
251 $ hg status -A capsdir1/capsdir
252 M CapsDir1/CapsDir/SubDir/Def.txt
252 M CapsDir1/CapsDir/SubDir/Def.txt
253 C CapsDir1/CapsDir/ABC.txt
253 C CapsDir1/CapsDir/ABC.txt
254
254
255 $ hg remove -f 'glob:**.txt' -X capsdir1/capsdir
255 $ hg remove -f 'glob:**.txt' -X capsdir1/capsdir
256 $ hg remove -f 'glob:**.txt' -I capsdir1/capsdir
256 $ hg remove -f 'glob:**.txt' -I capsdir1/capsdir
257 removing CapsDir1/CapsDir/ABC.txt
257 removing CapsDir1/CapsDir/ABC.txt
258 removing CapsDir1/CapsDir/SubDir/Def.txt
258 removing CapsDir1/CapsDir/SubDir/Def.txt
259 #endif
259 #endif
260
260
261 $ cd ..
261 $ cd ..
262
262
263 test --dry-run mode in forget
263 test --dry-run mode in forget
264
264
265 $ hg init testdir_forget
265 $ hg init testdir_forget
266 $ cd testdir_forget
266 $ cd testdir_forget
267 $ echo foo > foo
267 $ echo foo > foo
268 $ hg add foo
268 $ hg add foo
269 $ hg commit -m "foo"
269 $ hg commit -m "foo"
270 $ hg forget foo --dry-run -v
270 $ hg forget foo --dry-run -v
271 removing foo
271 removing foo
272 $ hg diff
272 $ hg diff
273 $ hg forget not_exist -n
273 $ hg forget not_exist -n
274 not_exist: $ENOENT$
274 not_exist: $ENOENT$
275 [1]
275 [1]
276
276
277 $ cd ..
277 $ cd ..
278
278
279 test --interactive mode in forget
279 test --interactive mode in forget
280
280
281 $ hg init interactiveforget
281 $ hg init interactiveforget
282 $ cd interactiveforget
282 $ cd interactiveforget
283 $ echo foo > foo
283 $ echo foo > foo
284 $ hg commit -qAm "foo"
284 $ hg commit -qAm "foo"
285 $ echo bar > bar
285 $ echo bar > bar
286 $ hg commit -qAm "bar"
286 $ hg commit -qAm "bar"
287 $ hg forget foo --dry-run -i
287 $ hg forget foo --dry-run -i
288 abort: cannot specify both --dry-run and --interactive
288 abort: cannot specify both --dry-run and --interactive
289 [10]
289 [10]
290
290
291 $ hg forget foo --config ui.interactive=True -i << EOF
291 $ hg forget foo --config ui.interactive=True -i << EOF
292 > ?
292 > ?
293 > n
293 > n
294 > EOF
294 > EOF
295 forget foo [Ynsa?] ?
295 forget foo [Ynsa?] ?
296 y - yes, forget this file
296 y - yes, forget this file
297 n - no, skip this file
297 n - no, skip this file
298 s - skip remaining files
298 s - skip remaining files
299 a - include all remaining files
299 a - include all remaining files
300 ? - ? (display help)
300 ? - ? (display help)
301 forget foo [Ynsa?] n
301 forget foo [Ynsa?] n
302
302
303 $ hg forget foo bar --config ui.interactive=True -i << EOF
303 $ hg forget foo bar --config ui.interactive=True -i << EOF
304 > y
304 > y
305 > n
305 > n
306 > EOF
306 > EOF
307 forget bar [Ynsa?] y
307 forget bar [Ynsa?] y
308 forget foo [Ynsa?] n
308 forget foo [Ynsa?] n
309 removing bar
309 removing bar
310 $ hg status
310 $ hg status
311 R bar
311 R bar
312 $ hg up -qC .
312 $ hg up -qC .
313
313
314 $ hg forget foo bar --config ui.interactive=True -i << EOF
314 $ hg forget foo bar --config ui.interactive=True -i << EOF
315 > s
315 > s
316 > EOF
316 > EOF
317 forget bar [Ynsa?] s
317 forget bar [Ynsa?] s
318 $ hg st
318 $ hg st
319 $ hg up -qC .
319 $ hg up -qC .
320
320
321 $ hg forget foo bar --config ui.interactive=True -i << EOF
321 $ hg forget foo bar --config ui.interactive=True -i << EOF
322 > a
322 > a
323 > EOF
323 > EOF
324 forget bar [Ynsa?] a
324 forget bar [Ynsa?] a
325 removing bar
325 removing bar
326 removing foo
326 removing foo
327 $ hg status
327 $ hg status
328 R bar
328 R bar
329 R foo
329 R foo
330 $ hg up -qC .
330 $ hg up -qC .
331
331
332 $ cd ..
332 $ cd ..
@@ -1,374 +1,374 b''
1 $ mkdir part1
1 $ mkdir part1
2 $ cd part1
2 $ cd part1
3
3
4 $ hg init
4 $ hg init
5 $ echo a > a
5 $ echo a > a
6 $ hg add a
6 $ hg add a
7 $ hg commit -m "1"
7 $ hg commit -m "1"
8 $ hg status
8 $ hg status
9 $ hg copy a b
9 $ hg copy a b
10 $ hg --config ui.portablefilenames=abort copy a con.xml
10 $ hg --config ui.portablefilenames=abort copy a con.xml
11 abort: filename contains 'con', which is reserved on Windows: con.xml
11 abort: filename contains 'con', which is reserved on Windows: con.xml
12 [255]
12 [10]
13 $ hg status
13 $ hg status
14 A b
14 A b
15 $ hg sum
15 $ hg sum
16 parent: 0:c19d34741b0a tip
16 parent: 0:c19d34741b0a tip
17 1
17 1
18 branch: default
18 branch: default
19 commit: 1 copied
19 commit: 1 copied
20 update: (current)
20 update: (current)
21 phases: 1 draft
21 phases: 1 draft
22 $ hg --debug commit -m "2"
22 $ hg --debug commit -m "2"
23 committing files:
23 committing files:
24 b
24 b
25 b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
25 b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
26 committing manifest
26 committing manifest
27 committing changelog
27 committing changelog
28 updating the branch cache
28 updating the branch cache
29 committed changeset 1:93580a2c28a50a56f63526fb305067e6fbf739c4
29 committed changeset 1:93580a2c28a50a56f63526fb305067e6fbf739c4
30
30
31 we should see two history entries
31 we should see two history entries
32
32
33 $ hg history -v
33 $ hg history -v
34 changeset: 1:93580a2c28a5
34 changeset: 1:93580a2c28a5
35 tag: tip
35 tag: tip
36 user: test
36 user: test
37 date: Thu Jan 01 00:00:00 1970 +0000
37 date: Thu Jan 01 00:00:00 1970 +0000
38 files: b
38 files: b
39 description:
39 description:
40 2
40 2
41
41
42
42
43 changeset: 0:c19d34741b0a
43 changeset: 0:c19d34741b0a
44 user: test
44 user: test
45 date: Thu Jan 01 00:00:00 1970 +0000
45 date: Thu Jan 01 00:00:00 1970 +0000
46 files: a
46 files: a
47 description:
47 description:
48 1
48 1
49
49
50
50
51
51
52 we should see one log entry for a
52 we should see one log entry for a
53
53
54 $ hg log a
54 $ hg log a
55 changeset: 0:c19d34741b0a
55 changeset: 0:c19d34741b0a
56 user: test
56 user: test
57 date: Thu Jan 01 00:00:00 1970 +0000
57 date: Thu Jan 01 00:00:00 1970 +0000
58 summary: 1
58 summary: 1
59
59
60
60
61 this should show a revision linked to changeset 0
61 this should show a revision linked to changeset 0
62
62
63 $ hg debugindex a
63 $ hg debugindex a
64 rev linkrev nodeid p1 p2
64 rev linkrev nodeid p1 p2
65 0 0 b789fdd96dc2 000000000000 000000000000
65 0 0 b789fdd96dc2 000000000000 000000000000
66
66
67 we should see one log entry for b
67 we should see one log entry for b
68
68
69 $ hg log b
69 $ hg log b
70 changeset: 1:93580a2c28a5
70 changeset: 1:93580a2c28a5
71 tag: tip
71 tag: tip
72 user: test
72 user: test
73 date: Thu Jan 01 00:00:00 1970 +0000
73 date: Thu Jan 01 00:00:00 1970 +0000
74 summary: 2
74 summary: 2
75
75
76
76
77 this should show a revision linked to changeset 1
77 this should show a revision linked to changeset 1
78
78
79 $ hg debugindex b
79 $ hg debugindex b
80 rev linkrev nodeid p1 p2
80 rev linkrev nodeid p1 p2
81 0 1 37d9b5d994ea 000000000000 000000000000
81 0 1 37d9b5d994ea 000000000000 000000000000
82
82
83 this should show the rename information in the metadata
83 this should show the rename information in the metadata
84
84
85 $ hg debugdata b 0 | head -3 | tail -2
85 $ hg debugdata b 0 | head -3 | tail -2
86 copy: a
86 copy: a
87 copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
87 copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
88
88
89 #if reporevlogstore
89 #if reporevlogstore
90 $ md5sum.py .hg/store/data/b.i
90 $ md5sum.py .hg/store/data/b.i
91 44913824c8f5890ae218f9829535922e .hg/store/data/b.i
91 44913824c8f5890ae218f9829535922e .hg/store/data/b.i
92 #endif
92 #endif
93 $ hg cat b > bsum
93 $ hg cat b > bsum
94 $ md5sum.py bsum
94 $ md5sum.py bsum
95 60b725f10c9c85c70d97880dfe8191b3 bsum
95 60b725f10c9c85c70d97880dfe8191b3 bsum
96 $ hg cat a > asum
96 $ hg cat a > asum
97 $ md5sum.py asum
97 $ md5sum.py asum
98 60b725f10c9c85c70d97880dfe8191b3 asum
98 60b725f10c9c85c70d97880dfe8191b3 asum
99 $ hg verify
99 $ hg verify
100 checking changesets
100 checking changesets
101 checking manifests
101 checking manifests
102 crosschecking files in changesets and manifests
102 crosschecking files in changesets and manifests
103 checking files
103 checking files
104 checked 2 changesets with 2 changes to 2 files
104 checked 2 changesets with 2 changes to 2 files
105
105
106 $ cd ..
106 $ cd ..
107
107
108
108
109 $ mkdir part2
109 $ mkdir part2
110 $ cd part2
110 $ cd part2
111
111
112 $ hg init
112 $ hg init
113 $ echo foo > foo
113 $ echo foo > foo
114 should fail - foo is not managed
114 should fail - foo is not managed
115 $ hg mv foo bar
115 $ hg mv foo bar
116 foo: not copying - file is not managed
116 foo: not copying - file is not managed
117 abort: no files to copy
117 abort: no files to copy
118 [10]
118 [10]
119 $ hg st -A
119 $ hg st -A
120 ? foo
120 ? foo
121 respects ui.relative-paths
121 respects ui.relative-paths
122 $ mkdir dir
122 $ mkdir dir
123 $ cd dir
123 $ cd dir
124 $ hg mv ../foo ../bar
124 $ hg mv ../foo ../bar
125 ../foo: not copying - file is not managed
125 ../foo: not copying - file is not managed
126 abort: no files to copy
126 abort: no files to copy
127 [10]
127 [10]
128 $ hg mv ../foo ../bar --config ui.relative-paths=yes
128 $ hg mv ../foo ../bar --config ui.relative-paths=yes
129 ../foo: not copying - file is not managed
129 ../foo: not copying - file is not managed
130 abort: no files to copy
130 abort: no files to copy
131 [10]
131 [10]
132 $ hg mv ../foo ../bar --config ui.relative-paths=no
132 $ hg mv ../foo ../bar --config ui.relative-paths=no
133 foo: not copying - file is not managed
133 foo: not copying - file is not managed
134 abort: no files to copy
134 abort: no files to copy
135 [10]
135 [10]
136 $ cd ..
136 $ cd ..
137 $ rmdir dir
137 $ rmdir dir
138 $ hg add foo
138 $ hg add foo
139 dry-run; print a warning that this is not a real copy; foo is added
139 dry-run; print a warning that this is not a real copy; foo is added
140 $ hg mv --dry-run foo bar
140 $ hg mv --dry-run foo bar
141 foo has not been committed yet, so no copy data will be stored for bar.
141 foo has not been committed yet, so no copy data will be stored for bar.
142 $ hg st -A
142 $ hg st -A
143 A foo
143 A foo
144 should print a warning that this is not a real copy; bar is added
144 should print a warning that this is not a real copy; bar is added
145 $ hg mv foo bar
145 $ hg mv foo bar
146 foo has not been committed yet, so no copy data will be stored for bar.
146 foo has not been committed yet, so no copy data will be stored for bar.
147 $ hg st -A
147 $ hg st -A
148 A bar
148 A bar
149 should print a warning that this is not a real copy; foo is added
149 should print a warning that this is not a real copy; foo is added
150 $ hg cp bar foo
150 $ hg cp bar foo
151 bar has not been committed yet, so no copy data will be stored for foo.
151 bar has not been committed yet, so no copy data will be stored for foo.
152 $ hg rm -f bar
152 $ hg rm -f bar
153 $ rm bar
153 $ rm bar
154 $ hg st -A
154 $ hg st -A
155 A foo
155 A foo
156 $ hg commit -m1
156 $ hg commit -m1
157
157
158 moving a missing file
158 moving a missing file
159 $ rm foo
159 $ rm foo
160 $ hg mv foo foo3
160 $ hg mv foo foo3
161 foo: deleted in working directory
161 foo: deleted in working directory
162 foo3 does not exist!
162 foo3 does not exist!
163 $ hg up -qC .
163 $ hg up -qC .
164
164
165 copy --after to a nonexistent target filename
165 copy --after to a nonexistent target filename
166 $ hg cp -A foo dummy
166 $ hg cp -A foo dummy
167 foo: not recording copy - dummy does not exist
167 foo: not recording copy - dummy does not exist
168 [1]
168 [1]
169
169
170 dry-run; should show that foo is clean
170 dry-run; should show that foo is clean
171 $ hg copy --dry-run foo bar
171 $ hg copy --dry-run foo bar
172 $ hg st -A
172 $ hg st -A
173 C foo
173 C foo
174 should show copy
174 should show copy
175 $ hg copy foo bar
175 $ hg copy foo bar
176 $ hg st -C
176 $ hg st -C
177 A bar
177 A bar
178 foo
178 foo
179
179
180 shouldn't show copy
180 shouldn't show copy
181 $ hg commit -m2
181 $ hg commit -m2
182 $ hg st -C
182 $ hg st -C
183
183
184 should match
184 should match
185 $ hg debugindex foo
185 $ hg debugindex foo
186 rev linkrev nodeid p1 p2
186 rev linkrev nodeid p1 p2
187 0 0 2ed2a3912a0b 000000000000 000000000000
187 0 0 2ed2a3912a0b 000000000000 000000000000
188 $ hg debugrename bar
188 $ hg debugrename bar
189 bar renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
189 bar renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
190
190
191 $ echo bleah > foo
191 $ echo bleah > foo
192 $ echo quux > bar
192 $ echo quux > bar
193 $ hg commit -m3
193 $ hg commit -m3
194
194
195 should not be renamed
195 should not be renamed
196 $ hg debugrename bar
196 $ hg debugrename bar
197 bar not renamed
197 bar not renamed
198
198
199 $ hg copy -f foo bar
199 $ hg copy -f foo bar
200 should show copy
200 should show copy
201 $ hg st -C
201 $ hg st -C
202 M bar
202 M bar
203 foo
203 foo
204
204
205 XXX: filtering lfilesrepo.status() in 3.3-rc causes the copy source to not be
205 XXX: filtering lfilesrepo.status() in 3.3-rc causes the copy source to not be
206 displayed.
206 displayed.
207 $ hg st -C --config extensions.largefiles=
207 $ hg st -C --config extensions.largefiles=
208 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
208 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
209 M bar
209 M bar
210 foo
210 foo
211
211
212 $ hg commit -m3
212 $ hg commit -m3
213
213
214 should show no parents for tip
214 should show no parents for tip
215 $ hg debugindex bar
215 $ hg debugindex bar
216 rev linkrev nodeid p1 p2
216 rev linkrev nodeid p1 p2
217 0 1 7711d36246cc 000000000000 000000000000
217 0 1 7711d36246cc 000000000000 000000000000
218 1 2 bdf70a2b8d03 7711d36246cc 000000000000
218 1 2 bdf70a2b8d03 7711d36246cc 000000000000
219 2 3 b2558327ea8d 000000000000 000000000000
219 2 3 b2558327ea8d 000000000000 000000000000
220 should match
220 should match
221 $ hg debugindex foo
221 $ hg debugindex foo
222 rev linkrev nodeid p1 p2
222 rev linkrev nodeid p1 p2
223 0 0 2ed2a3912a0b 000000000000 000000000000
223 0 0 2ed2a3912a0b 000000000000 000000000000
224 1 2 dd12c926cf16 2ed2a3912a0b 000000000000
224 1 2 dd12c926cf16 2ed2a3912a0b 000000000000
225 $ hg debugrename bar
225 $ hg debugrename bar
226 bar renamed from foo:dd12c926cf165e3eb4cf87b084955cb617221c17
226 bar renamed from foo:dd12c926cf165e3eb4cf87b084955cb617221c17
227
227
228 should show no copies
228 should show no copies
229 $ hg st -C
229 $ hg st -C
230
230
231 copy --after on an added file
231 copy --after on an added file
232 $ cp bar baz
232 $ cp bar baz
233 $ hg add baz
233 $ hg add baz
234 $ hg cp -A bar baz
234 $ hg cp -A bar baz
235 $ hg st -C
235 $ hg st -C
236 A baz
236 A baz
237 bar
237 bar
238
238
239 foo was clean:
239 foo was clean:
240 $ hg st -AC foo
240 $ hg st -AC foo
241 C foo
241 C foo
242 Trying to copy on top of an existing file fails,
242 Trying to copy on top of an existing file fails,
243 $ hg copy -A bar foo
243 $ hg copy -A bar foo
244 foo: not overwriting - file already committed
244 foo: not overwriting - file already committed
245 ('hg copy --after --force' to replace the file by recording a copy)
245 ('hg copy --after --force' to replace the file by recording a copy)
246 [1]
246 [1]
247 same error without the --after, so the user doesn't have to go through
247 same error without the --after, so the user doesn't have to go through
248 two hints:
248 two hints:
249 $ hg copy bar foo
249 $ hg copy bar foo
250 foo: not overwriting - file already committed
250 foo: not overwriting - file already committed
251 ('hg copy --force' to replace the file by recording a copy)
251 ('hg copy --force' to replace the file by recording a copy)
252 [1]
252 [1]
253 but it's considered modified after a copy --after --force
253 but it's considered modified after a copy --after --force
254 $ hg copy -Af bar foo
254 $ hg copy -Af bar foo
255 $ hg st -AC foo
255 $ hg st -AC foo
256 M foo
256 M foo
257 bar
257 bar
258 The hint for a file that exists but is not in file history doesn't
258 The hint for a file that exists but is not in file history doesn't
259 mention --force:
259 mention --force:
260 $ touch xyzzy
260 $ touch xyzzy
261 $ hg cp bar xyzzy
261 $ hg cp bar xyzzy
262 xyzzy: not overwriting - file exists
262 xyzzy: not overwriting - file exists
263 ('hg copy --after' to record the copy)
263 ('hg copy --after' to record the copy)
264 [1]
264 [1]
265 $ hg co -qC .
265 $ hg co -qC .
266 $ rm baz xyzzy
266 $ rm baz xyzzy
267
267
268
268
269 Test unmarking copy of a single file
269 Test unmarking copy of a single file
270
270
271 # Set up by creating a copy
271 # Set up by creating a copy
272 $ hg cp bar baz
272 $ hg cp bar baz
273 # Test uncopying a non-existent file
273 # Test uncopying a non-existent file
274 $ hg copy --forget non-existent
274 $ hg copy --forget non-existent
275 non-existent: $ENOENT$
275 non-existent: $ENOENT$
276 # Test uncopying a tracked but unrelated file
276 # Test uncopying a tracked but unrelated file
277 $ hg copy --forget foo
277 $ hg copy --forget foo
278 foo: not unmarking as copy - file is not marked as copied
278 foo: not unmarking as copy - file is not marked as copied
279 # Test uncopying a copy source
279 # Test uncopying a copy source
280 $ hg copy --forget bar
280 $ hg copy --forget bar
281 bar: not unmarking as copy - file is not marked as copied
281 bar: not unmarking as copy - file is not marked as copied
282 # baz should still be marked as a copy
282 # baz should still be marked as a copy
283 $ hg st -C
283 $ hg st -C
284 A baz
284 A baz
285 bar
285 bar
286 # Test the normal case
286 # Test the normal case
287 $ hg copy --forget baz
287 $ hg copy --forget baz
288 $ hg st -C
288 $ hg st -C
289 A baz
289 A baz
290 # Test uncopy with matching and non-matching patterns
290 # Test uncopy with matching and non-matching patterns
291 $ hg cp bar baz --after
291 $ hg cp bar baz --after
292 $ hg copy --forget bar baz
292 $ hg copy --forget bar baz
293 bar: not unmarking as copy - file is not marked as copied
293 bar: not unmarking as copy - file is not marked as copied
294 $ hg st -C
294 $ hg st -C
295 A baz
295 A baz
296 # Test uncopy with no exact matches
296 # Test uncopy with no exact matches
297 $ hg cp bar baz --after
297 $ hg cp bar baz --after
298 $ hg copy --forget .
298 $ hg copy --forget .
299 $ hg st -C
299 $ hg st -C
300 A baz
300 A baz
301 $ hg forget baz
301 $ hg forget baz
302 $ rm baz
302 $ rm baz
303
303
304 Test unmarking copy of a directory
304 Test unmarking copy of a directory
305
305
306 $ mkdir dir
306 $ mkdir dir
307 $ echo foo > dir/foo
307 $ echo foo > dir/foo
308 $ echo bar > dir/bar
308 $ echo bar > dir/bar
309 $ hg add dir
309 $ hg add dir
310 adding dir/bar
310 adding dir/bar
311 adding dir/foo
311 adding dir/foo
312 $ hg ci -m 'add dir/'
312 $ hg ci -m 'add dir/'
313 $ hg cp dir dir2
313 $ hg cp dir dir2
314 copying dir/bar to dir2/bar
314 copying dir/bar to dir2/bar
315 copying dir/foo to dir2/foo
315 copying dir/foo to dir2/foo
316 $ touch dir2/untracked
316 $ touch dir2/untracked
317 $ hg copy --forget dir2
317 $ hg copy --forget dir2
318 $ hg st -C
318 $ hg st -C
319 A dir2/bar
319 A dir2/bar
320 A dir2/foo
320 A dir2/foo
321 ? dir2/untracked
321 ? dir2/untracked
322 # Clean up for next test
322 # Clean up for next test
323 $ hg forget dir2
323 $ hg forget dir2
324 removing dir2/bar
324 removing dir2/bar
325 removing dir2/foo
325 removing dir2/foo
326 $ rm -r dir2
326 $ rm -r dir2
327
327
328 Test uncopy on committed copies
328 Test uncopy on committed copies
329
329
330 # Commit some copies
330 # Commit some copies
331 $ hg cp bar baz
331 $ hg cp bar baz
332 $ hg cp bar qux
332 $ hg cp bar qux
333 $ hg ci -m copies
333 $ hg ci -m copies
334 $ hg st -C --change .
334 $ hg st -C --change .
335 A baz
335 A baz
336 bar
336 bar
337 A qux
337 A qux
338 bar
338 bar
339 $ base=$(hg log -r '.^' -T '{rev}')
339 $ base=$(hg log -r '.^' -T '{rev}')
340 $ hg log -G -T '{rev}:{node|short} {desc}\n' -r $base:
340 $ hg log -G -T '{rev}:{node|short} {desc}\n' -r $base:
341 @ 5:a612dc2edfda copies
341 @ 5:a612dc2edfda copies
342 |
342 |
343 o 4:4800b1f1f38e add dir/
343 o 4:4800b1f1f38e add dir/
344 |
344 |
345 ~
345 ~
346 # Add a dirty change on top to show that it's unaffected
346 # Add a dirty change on top to show that it's unaffected
347 $ echo dirty >> baz
347 $ echo dirty >> baz
348 $ hg st
348 $ hg st
349 M baz
349 M baz
350 $ cat baz
350 $ cat baz
351 bleah
351 bleah
352 dirty
352 dirty
353 $ hg copy --forget --at-rev . baz
353 $ hg copy --forget --at-rev . baz
354 saved backup bundle to $TESTTMP/part2/.hg/strip-backup/a612dc2edfda-e36b4448-uncopy.hg
354 saved backup bundle to $TESTTMP/part2/.hg/strip-backup/a612dc2edfda-e36b4448-uncopy.hg
355 # The unwanted copy is no longer recorded, but the unrelated one is
355 # The unwanted copy is no longer recorded, but the unrelated one is
356 $ hg st -C --change .
356 $ hg st -C --change .
357 A baz
357 A baz
358 A qux
358 A qux
359 bar
359 bar
360 # The old commit is gone and we have updated to the new commit
360 # The old commit is gone and we have updated to the new commit
361 $ hg log -G -T '{rev}:{node|short} {desc}\n' -r $base:
361 $ hg log -G -T '{rev}:{node|short} {desc}\n' -r $base:
362 @ 5:c45090e5effe copies
362 @ 5:c45090e5effe copies
363 |
363 |
364 o 4:4800b1f1f38e add dir/
364 o 4:4800b1f1f38e add dir/
365 |
365 |
366 ~
366 ~
367 # Working copy still has the uncommitted change
367 # Working copy still has the uncommitted change
368 $ hg st
368 $ hg st
369 M baz
369 M baz
370 $ cat baz
370 $ cat baz
371 bleah
371 bleah
372 dirty
372 dirty
373
373
374 $ cd ..
374 $ cd ..
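A note on reading these transcripts before the next file: a bracketed number such as [1] on its own line records the exit status of the preceding command, and the absence of a bracket means the command exited 0. The short Python sketch below shows one way to check the same thing outside the test harness; it is only an illustration, and the repository path "part2" is taken from the $TESTTMP output above rather than anything the harness guarantees.

  import subprocess

  def run_hg(args, cwd):
      # Run an hg command and report (exit status, output), mirroring the way
      # the .t transcripts pair command output with a trailing [N] line.
      proc = subprocess.run(["hg"] + args, cwd=cwd, capture_output=True, text=True)
      return proc.returncode, proc.stdout

  # 'hg copy --after bar foo' above fails with status 1 while foo is still
  # committed and untouched; after 'hg copy -Af bar foo' the copy is recorded.
  status, out = run_hg(["copy", "--after", "bar", "foo"], cwd="part2")
  print(status)  # expected 1, matching the [1] line in the transcript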
@@ -1,80 +1,80 b''
1 #require eol-in-paths
1 #require eol-in-paths
2
2
3 https://bz.mercurial-scm.org/352
3 https://bz.mercurial-scm.org/352
4
4
5 test issue352
5 test issue352
6
6
7 $ hg init foo
7 $ hg init foo
8 $ cd foo
8 $ cd foo
9 $ A=`printf 'he\rllo'`
9 $ A=`printf 'he\rllo'`
10 $ echo foo > "$A"
10 $ echo foo > "$A"
11 $ hg add
11 $ hg add
12 adding he\r (no-eol) (esc)
12 adding he\r (no-eol) (esc)
13 llo
13 llo
14 abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
14 abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
15 [255]
15 [10]
16 $ hg ci -A -m m
16 $ hg ci -A -m m
17 adding he\r (no-eol) (esc)
17 adding he\r (no-eol) (esc)
18 llo
18 llo
19 abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
19 abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
20 [255]
20 [10]
21 $ rm "$A"
21 $ rm "$A"
22 $ echo foo > "hell
22 $ echo foo > "hell
23 > o"
23 > o"
24 $ hg add
24 $ hg add
25 adding hell
25 adding hell
26 o
26 o
27 abort: '\n' and '\r' disallowed in filenames: 'hell\no'
27 abort: '\n' and '\r' disallowed in filenames: 'hell\no'
28 [255]
28 [10]
29 $ hg ci -A -m m
29 $ hg ci -A -m m
30 adding hell
30 adding hell
31 o
31 o
32 abort: '\n' and '\r' disallowed in filenames: 'hell\no'
32 abort: '\n' and '\r' disallowed in filenames: 'hell\no'
33 [255]
33 [10]
34 $ echo foo > "$A"
34 $ echo foo > "$A"
35 $ hg debugwalk -v
35 $ hg debugwalk -v
36 * matcher:
36 * matcher:
37 <alwaysmatcher>
37 <alwaysmatcher>
38 f he\r (no-eol) (esc)
38 f he\r (no-eol) (esc)
39 llo he\r (no-eol) (esc)
39 llo he\r (no-eol) (esc)
40 llo
40 llo
41 f hell
41 f hell
42 o hell
42 o hell
43 o
43 o
44
44
45 $ echo bla > quickfox
45 $ echo bla > quickfox
46 $ hg add quickfox
46 $ hg add quickfox
47 $ hg ci -m 2
47 $ hg ci -m 2
48 $ A=`printf 'quick\rfox'`
48 $ A=`printf 'quick\rfox'`
49 $ hg cp quickfox "$A"
49 $ hg cp quickfox "$A"
50 abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
50 abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
51 [255]
51 [10]
52 $ hg mv quickfox "$A"
52 $ hg mv quickfox "$A"
53 abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
53 abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
54 [255]
54 [10]
55
55
56 https://bz.mercurial-scm.org/2036
56 https://bz.mercurial-scm.org/2036
57
57
58 $ cd ..
58 $ cd ..
59
59
60 test issue2039
60 test issue2039
61
61
62 $ hg init bar
62 $ hg init bar
63 $ cd bar
63 $ cd bar
64 $ cat <<EOF >> $HGRCPATH
64 $ cat <<EOF >> $HGRCPATH
65 > [extensions]
65 > [extensions]
66 > color =
66 > color =
67 > [color]
67 > [color]
68 > mode = ansi
68 > mode = ansi
69 > EOF
69 > EOF
70 $ A=`printf 'foo\nbar'`
70 $ A=`printf 'foo\nbar'`
71 $ B=`printf 'foo\nbar.baz'`
71 $ B=`printf 'foo\nbar.baz'`
72 $ touch "$A"
72 $ touch "$A"
73 $ touch "$B"
73 $ touch "$B"
74 $ hg status --color=always
74 $ hg status --color=always
75 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mfoo\x1b[0m (esc)
75 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mfoo\x1b[0m (esc)
76 \x1b[0;35;1;4mbar\x1b[0m (esc)
76 \x1b[0;35;1;4mbar\x1b[0m (esc)
77 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mfoo\x1b[0m (esc)
77 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mfoo\x1b[0m (esc)
78 \x1b[0;35;1;4mbar.baz\x1b[0m (esc)
78 \x1b[0;35;1;4mbar.baz\x1b[0m (esc)
79
79
80 $ cd ..
80 $ cd ..
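The [255] -> [10] pairs in the file above are the substance of this changeset: aborts raised for bad user input (here, filenames containing '\n' or '\r') now report the more specific exit status 10 instead of the generic 255. The fragment below is a minimal sketch of the rule the error message states, not Mercurial's own path checker:

  def check_filename(name):
      # The abort message spells out the rule: newline and carriage return
      # are never allowed in tracked filenames.
      for ch in ("\n", "\r"):
          if ch in name:
              raise ValueError("'\\n' and '\\r' disallowed in filenames: %r" % name)

  check_filename("quickfox")        # accepted
  try:
      check_filename("quick\rfox")  # rejected, as in the transcript above
  except ValueError as exc:
      print(exc)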
@@ -1,698 +1,698 b''
1 $ hg init
1 $ hg init
2 $ mkdir d1 d1/d11 d2
2 $ mkdir d1 d1/d11 d2
3 $ echo d1/a > d1/a
3 $ echo d1/a > d1/a
4 $ echo d1/ba > d1/ba
4 $ echo d1/ba > d1/ba
5 $ echo d1/a1 > d1/d11/a1
5 $ echo d1/a1 > d1/d11/a1
6 $ echo d1/b > d1/b
6 $ echo d1/b > d1/b
7 $ echo d2/b > d2/b
7 $ echo d2/b > d2/b
8 $ hg add d1/a d1/b d1/ba d1/d11/a1 d2/b
8 $ hg add d1/a d1/b d1/ba d1/d11/a1 d2/b
9 $ hg commit -m "1"
9 $ hg commit -m "1"
10
10
11 rename a single file
11 rename a single file
12
12
13 $ hg rename d1/d11/a1 d2/c
13 $ hg rename d1/d11/a1 d2/c
14 $ hg --config ui.portablefilenames=abort rename d1/a d1/con.xml
14 $ hg --config ui.portablefilenames=abort rename d1/a d1/con.xml
15 abort: filename contains 'con', which is reserved on Windows: d1/con.xml
15 abort: filename contains 'con', which is reserved on Windows: d1/con.xml
16 [255]
16 [10]
17 $ hg sum
17 $ hg sum
18 parent: 0:9b4b6e7b2c26 tip
18 parent: 0:9b4b6e7b2c26 tip
19 1
19 1
20 branch: default
20 branch: default
21 commit: 1 renamed
21 commit: 1 renamed
22 update: (current)
22 update: (current)
23 phases: 1 draft
23 phases: 1 draft
24 $ hg status -C
24 $ hg status -C
25 A d2/c
25 A d2/c
26 d1/d11/a1
26 d1/d11/a1
27 R d1/d11/a1
27 R d1/d11/a1
28 $ hg update -C
28 $ hg update -C
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
30 $ rm d2/c
30 $ rm d2/c
31
31
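The portablefilenames abort above is another bad-input error now exiting with status 10: the stem "con" in "d1/con.xml" is a name Windows reserves for devices. A hypothetical stand-in for that rule, for illustration only (the reserved-name list below is an assumption, not copied from Mercurial):

  _WIN_RESERVED = (
      {"con", "prn", "aux", "nul"}
      | {"com%d" % i for i in range(1, 10)}
      | {"lpt%d" % i for i in range(1, 10)}
  )

  def reserved_component(path):
      # Return the first path component whose stem is a reserved device name.
      for component in path.split("/"):
          if component.split(".", 1)[0].lower() in _WIN_RESERVED:
              return component
      return None

  print(reserved_component("d1/con.xml"))  # 'con.xml' -> would be rejected
  print(reserved_component("d1/a"))        # None -> portable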
32 rename a single file using absolute paths
32 rename a single file using absolute paths
33
33
34 $ hg rename `pwd`/d1/d11/a1 `pwd`/d2/c
34 $ hg rename `pwd`/d1/d11/a1 `pwd`/d2/c
35 $ hg status -C
35 $ hg status -C
36 A d2/c
36 A d2/c
37 d1/d11/a1
37 d1/d11/a1
38 R d1/d11/a1
38 R d1/d11/a1
39 $ hg update -C
39 $ hg update -C
40 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 $ rm d2/c
41 $ rm d2/c
42
42
43 rename --after a single file
43 rename --after a single file
44
44
45 $ mv d1/d11/a1 d2/c
45 $ mv d1/d11/a1 d2/c
46 $ hg rename --after d1/d11/a1 d2/c
46 $ hg rename --after d1/d11/a1 d2/c
47 $ hg status -C
47 $ hg status -C
48 A d2/c
48 A d2/c
49 d1/d11/a1
49 d1/d11/a1
50 R d1/d11/a1
50 R d1/d11/a1
51 $ hg update -C
51 $ hg update -C
52 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
52 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
53 $ rm d2/c
53 $ rm d2/c
54
54
55 rename --after a single file when src and tgt already tracked
55 rename --after a single file when src and tgt already tracked
56
56
57 $ mv d1/d11/a1 d2/c
57 $ mv d1/d11/a1 d2/c
58 $ hg addrem -s 0
58 $ hg addrem -s 0
59 removing d1/d11/a1
59 removing d1/d11/a1
60 adding d2/c
60 adding d2/c
61 $ hg rename --after d1/d11/a1 d2/c
61 $ hg rename --after d1/d11/a1 d2/c
62 $ hg status -C
62 $ hg status -C
63 A d2/c
63 A d2/c
64 d1/d11/a1
64 d1/d11/a1
65 R d1/d11/a1
65 R d1/d11/a1
66 $ hg update -C
66 $ hg update -C
67 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
67 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
68 $ rm d2/c
68 $ rm d2/c
69
69
70 rename --after a single file to a nonexistent target filename
70 rename --after a single file to a nonexistent target filename
71
71
72 $ hg rename --after d1/a dummy
72 $ hg rename --after d1/a dummy
73 d1/a: not recording move - dummy does not exist
73 d1/a: not recording move - dummy does not exist
74 [1]
74 [1]
75
75
76 move a single file to an existing directory
76 move a single file to an existing directory
77
77
78 $ hg rename d1/d11/a1 d2
78 $ hg rename d1/d11/a1 d2
79 $ hg status -C
79 $ hg status -C
80 A d2/a1
80 A d2/a1
81 d1/d11/a1
81 d1/d11/a1
82 R d1/d11/a1
82 R d1/d11/a1
83 $ hg update -C
83 $ hg update -C
84 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
85 $ rm d2/a1
85 $ rm d2/a1
86
86
87 move --after a single file to an existing directory
87 move --after a single file to an existing directory
88
88
89 $ mv d1/d11/a1 d2
89 $ mv d1/d11/a1 d2
90 $ hg rename --after d1/d11/a1 d2
90 $ hg rename --after d1/d11/a1 d2
91 $ hg status -C
91 $ hg status -C
92 A d2/a1
92 A d2/a1
93 d1/d11/a1
93 d1/d11/a1
94 R d1/d11/a1
94 R d1/d11/a1
95 $ hg update -C
95 $ hg update -C
96 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 $ rm d2/a1
97 $ rm d2/a1
98
98
99 rename a file using a relative path
99 rename a file using a relative path
100
100
101 $ (cd d1/d11; hg rename ../../d2/b e)
101 $ (cd d1/d11; hg rename ../../d2/b e)
102 $ hg status -C
102 $ hg status -C
103 A d1/d11/e
103 A d1/d11/e
104 d2/b
104 d2/b
105 R d2/b
105 R d2/b
106 $ hg update -C
106 $ hg update -C
107 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
108 $ rm d1/d11/e
108 $ rm d1/d11/e
109
109
110 rename --after a file using a relative path
110 rename --after a file using a relative path
111
111
112 $ (cd d1/d11; mv ../../d2/b e; hg rename --after ../../d2/b e)
112 $ (cd d1/d11; mv ../../d2/b e; hg rename --after ../../d2/b e)
113 $ hg status -C
113 $ hg status -C
114 A d1/d11/e
114 A d1/d11/e
115 d2/b
115 d2/b
116 R d2/b
116 R d2/b
117 $ hg update -C
117 $ hg update -C
118 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
118 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
119 $ rm d1/d11/e
119 $ rm d1/d11/e
120
120
121 rename directory d1 as d3
121 rename directory d1 as d3
122
122
123 $ hg rename d1/ d3
123 $ hg rename d1/ d3
124 moving d1/a to d3/a
124 moving d1/a to d3/a
125 moving d1/b to d3/b
125 moving d1/b to d3/b
126 moving d1/ba to d3/ba
126 moving d1/ba to d3/ba
127 moving d1/d11/a1 to d3/d11/a1
127 moving d1/d11/a1 to d3/d11/a1
128 $ hg status -C
128 $ hg status -C
129 A d3/a
129 A d3/a
130 d1/a
130 d1/a
131 A d3/b
131 A d3/b
132 d1/b
132 d1/b
133 A d3/ba
133 A d3/ba
134 d1/ba
134 d1/ba
135 A d3/d11/a1
135 A d3/d11/a1
136 d1/d11/a1
136 d1/d11/a1
137 R d1/a
137 R d1/a
138 R d1/b
138 R d1/b
139 R d1/ba
139 R d1/ba
140 R d1/d11/a1
140 R d1/d11/a1
141 $ hg update -C
141 $ hg update -C
142 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
142 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
143 $ rm -rf d3
143 $ rm -rf d3
144
144
145 rename --after directory d1 as d3
145 rename --after directory d1 as d3
146
146
147 $ mv d1 d3
147 $ mv d1 d3
148 $ hg rename --after d1 d3
148 $ hg rename --after d1 d3
149 moving d1/a to d3/a
149 moving d1/a to d3/a
150 moving d1/b to d3/b
150 moving d1/b to d3/b
151 moving d1/ba to d3/ba
151 moving d1/ba to d3/ba
152 moving d1/d11/a1 to d3/d11/a1
152 moving d1/d11/a1 to d3/d11/a1
153 $ hg status -C
153 $ hg status -C
154 A d3/a
154 A d3/a
155 d1/a
155 d1/a
156 A d3/b
156 A d3/b
157 d1/b
157 d1/b
158 A d3/ba
158 A d3/ba
159 d1/ba
159 d1/ba
160 A d3/d11/a1
160 A d3/d11/a1
161 d1/d11/a1
161 d1/d11/a1
162 R d1/a
162 R d1/a
163 R d1/b
163 R d1/b
164 R d1/ba
164 R d1/ba
165 R d1/d11/a1
165 R d1/d11/a1
166 $ hg update -C
166 $ hg update -C
167 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
167 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
168 $ rm -rf d3
168 $ rm -rf d3
169
169
170 move a directory using a relative path
170 move a directory using a relative path
171
171
172 $ (cd d2; mkdir d3; hg rename ../d1/d11 d3)
172 $ (cd d2; mkdir d3; hg rename ../d1/d11 d3)
173 moving ../d1/d11/a1 to d3/d11/a1
173 moving ../d1/d11/a1 to d3/d11/a1
174 $ hg status -C
174 $ hg status -C
175 A d2/d3/d11/a1
175 A d2/d3/d11/a1
176 d1/d11/a1
176 d1/d11/a1
177 R d1/d11/a1
177 R d1/d11/a1
178 $ hg update -C
178 $ hg update -C
179 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
179 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
180 $ rm -rf d2/d3
180 $ rm -rf d2/d3
181
181
182 move --after a directory using a relative path
182 move --after a directory using a relative path
183
183
184 $ (cd d2; mkdir d3; mv ../d1/d11 d3; hg rename --after ../d1/d11 d3)
184 $ (cd d2; mkdir d3; mv ../d1/d11 d3; hg rename --after ../d1/d11 d3)
185 moving ../d1/d11/a1 to d3/d11/a1
185 moving ../d1/d11/a1 to d3/d11/a1
186 $ hg status -C
186 $ hg status -C
187 A d2/d3/d11/a1
187 A d2/d3/d11/a1
188 d1/d11/a1
188 d1/d11/a1
189 R d1/d11/a1
189 R d1/d11/a1
190 $ hg update -C
190 $ hg update -C
191 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
191 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
192 $ rm -rf d2/d3
192 $ rm -rf d2/d3
193
193
194 move directory d1/d11 to an existing directory d2 (removes empty d1)
194 move directory d1/d11 to an existing directory d2 (removes empty d1)
195
195
196 $ hg rename d1/d11/ d2
196 $ hg rename d1/d11/ d2
197 moving d1/d11/a1 to d2/d11/a1
197 moving d1/d11/a1 to d2/d11/a1
198 $ hg status -C
198 $ hg status -C
199 A d2/d11/a1
199 A d2/d11/a1
200 d1/d11/a1
200 d1/d11/a1
201 R d1/d11/a1
201 R d1/d11/a1
202 $ hg update -C
202 $ hg update -C
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
204 $ rm -rf d2/d11
204 $ rm -rf d2/d11
205
205
206 move directories d1 and d2 to a new directory d3
206 move directories d1 and d2 to a new directory d3
207
207
208 $ mkdir d3
208 $ mkdir d3
209 $ hg rename d1 d2 d3
209 $ hg rename d1 d2 d3
210 moving d1/a to d3/d1/a
210 moving d1/a to d3/d1/a
211 moving d1/b to d3/d1/b
211 moving d1/b to d3/d1/b
212 moving d1/ba to d3/d1/ba
212 moving d1/ba to d3/d1/ba
213 moving d1/d11/a1 to d3/d1/d11/a1
213 moving d1/d11/a1 to d3/d1/d11/a1
214 moving d2/b to d3/d2/b
214 moving d2/b to d3/d2/b
215 $ hg status -C
215 $ hg status -C
216 A d3/d1/a
216 A d3/d1/a
217 d1/a
217 d1/a
218 A d3/d1/b
218 A d3/d1/b
219 d1/b
219 d1/b
220 A d3/d1/ba
220 A d3/d1/ba
221 d1/ba
221 d1/ba
222 A d3/d1/d11/a1
222 A d3/d1/d11/a1
223 d1/d11/a1
223 d1/d11/a1
224 A d3/d2/b
224 A d3/d2/b
225 d2/b
225 d2/b
226 R d1/a
226 R d1/a
227 R d1/b
227 R d1/b
228 R d1/ba
228 R d1/ba
229 R d1/d11/a1
229 R d1/d11/a1
230 R d2/b
230 R d2/b
231 $ hg update -C
231 $ hg update -C
232 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
232 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
233 $ rm -rf d3
233 $ rm -rf d3
234
234
235 move --after directories d1 and d2 to a new directory d3
235 move --after directories d1 and d2 to a new directory d3
236
236
237 $ mkdir d3
237 $ mkdir d3
238 $ mv d1 d2 d3
238 $ mv d1 d2 d3
239 $ hg rename --after d1 d2 d3
239 $ hg rename --after d1 d2 d3
240 moving d1/a to d3/d1/a
240 moving d1/a to d3/d1/a
241 moving d1/b to d3/d1/b
241 moving d1/b to d3/d1/b
242 moving d1/ba to d3/d1/ba
242 moving d1/ba to d3/d1/ba
243 moving d1/d11/a1 to d3/d1/d11/a1
243 moving d1/d11/a1 to d3/d1/d11/a1
244 moving d2/b to d3/d2/b
244 moving d2/b to d3/d2/b
245 $ hg status -C
245 $ hg status -C
246 A d3/d1/a
246 A d3/d1/a
247 d1/a
247 d1/a
248 A d3/d1/b
248 A d3/d1/b
249 d1/b
249 d1/b
250 A d3/d1/ba
250 A d3/d1/ba
251 d1/ba
251 d1/ba
252 A d3/d1/d11/a1
252 A d3/d1/d11/a1
253 d1/d11/a1
253 d1/d11/a1
254 A d3/d2/b
254 A d3/d2/b
255 d2/b
255 d2/b
256 R d1/a
256 R d1/a
257 R d1/b
257 R d1/b
258 R d1/ba
258 R d1/ba
259 R d1/d11/a1
259 R d1/d11/a1
260 R d2/b
260 R d2/b
261 $ hg update -C
261 $ hg update -C
262 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
262 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
263 $ rm -rf d3
263 $ rm -rf d3
264
264
265 move everything under directory d1 to existing directory d2, do not
265 move everything under directory d1 to existing directory d2, do not
266 overwrite existing files (d2/b)
266 overwrite existing files (d2/b)
267
267
268 $ hg rename d1/* d2
268 $ hg rename d1/* d2
269 d2/b: not overwriting - file already committed
269 d2/b: not overwriting - file already committed
270 ('hg rename --force' to replace the file by recording a rename)
270 ('hg rename --force' to replace the file by recording a rename)
271 moving d1/d11/a1 to d2/d11/a1
271 moving d1/d11/a1 to d2/d11/a1
272 [1]
272 [1]
273 $ hg status -C
273 $ hg status -C
274 A d2/a
274 A d2/a
275 d1/a
275 d1/a
276 A d2/ba
276 A d2/ba
277 d1/ba
277 d1/ba
278 A d2/d11/a1
278 A d2/d11/a1
279 d1/d11/a1
279 d1/d11/a1
280 R d1/a
280 R d1/a
281 R d1/ba
281 R d1/ba
282 R d1/d11/a1
282 R d1/d11/a1
283 $ diff -u d1/b d2/b
283 $ diff -u d1/b d2/b
284 --- d1/b * (glob)
284 --- d1/b * (glob)
285 +++ d2/b * (glob)
285 +++ d2/b * (glob)
286 @@ * (glob)
286 @@ * (glob)
287 -d1/b
287 -d1/b
288 +d2/b
288 +d2/b
289 [1]
289 [1]
290 $ hg update -C
290 $ hg update -C
291 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
291 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
292 $ rm d2/a d2/ba d2/d11/a1
292 $ rm d2/a d2/ba d2/d11/a1
293
293
294 attempt to move one file into a non-existent directory
294 attempt to move one file into a non-existent directory
295
295
296 $ hg rename d1/a dx/
296 $ hg rename d1/a dx/
297 abort: destination dx/ is not a directory
297 abort: destination dx/ is not a directory
298 [10]
298 [10]
299 $ hg status -C
299 $ hg status -C
300 $ hg update -C
300 $ hg update -C
301 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
301 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
302
302
303 attempt to move potentially more than one file into a non-existent directory
303 attempt to move potentially more than one file into a non-existent directory
304
304
305 $ hg rename 'glob:d1/**' dx
305 $ hg rename 'glob:d1/**' dx
306 abort: with multiple sources, destination must be an existing directory
306 abort: with multiple sources, destination must be an existing directory
307 [10]
307 [10]
308
308
309 move every file under d1 to d2/d21
309 move every file under d1 to d2/d21
310
310
311 $ mkdir d2/d21
311 $ mkdir d2/d21
312 $ hg rename 'glob:d1/**' d2/d21
312 $ hg rename 'glob:d1/**' d2/d21
313 moving d1/a to d2/d21/a
313 moving d1/a to d2/d21/a
314 moving d1/b to d2/d21/b
314 moving d1/b to d2/d21/b
315 moving d1/ba to d2/d21/ba
315 moving d1/ba to d2/d21/ba
316 moving d1/d11/a1 to d2/d21/a1
316 moving d1/d11/a1 to d2/d21/a1
317 $ hg status -C
317 $ hg status -C
318 A d2/d21/a
318 A d2/d21/a
319 d1/a
319 d1/a
320 A d2/d21/a1
320 A d2/d21/a1
321 d1/d11/a1
321 d1/d11/a1
322 A d2/d21/b
322 A d2/d21/b
323 d1/b
323 d1/b
324 A d2/d21/ba
324 A d2/d21/ba
325 d1/ba
325 d1/ba
326 R d1/a
326 R d1/a
327 R d1/b
327 R d1/b
328 R d1/ba
328 R d1/ba
329 R d1/d11/a1
329 R d1/d11/a1
330 $ hg update -C
330 $ hg update -C
331 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
331 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
332 $ rm -rf d2/d21
332 $ rm -rf d2/d21
333
333
334 move --after some files under d1 to d2/d21
334 move --after some files under d1 to d2/d21
335
335
336 $ mkdir d2/d21
336 $ mkdir d2/d21
337 $ mv d1/a d1/d11/a1 d2/d21
337 $ mv d1/a d1/d11/a1 d2/d21
338 $ hg rename --after 'glob:d1/**' d2/d21
338 $ hg rename --after 'glob:d1/**' d2/d21
339 moving d1/a to d2/d21/a
339 moving d1/a to d2/d21/a
340 d1/b: not recording move - d2/d21/b does not exist
340 d1/b: not recording move - d2/d21/b does not exist
341 d1/ba: not recording move - d2/d21/ba does not exist
341 d1/ba: not recording move - d2/d21/ba does not exist
342 moving d1/d11/a1 to d2/d21/a1
342 moving d1/d11/a1 to d2/d21/a1
343 [1]
343 [1]
344 $ hg status -C
344 $ hg status -C
345 A d2/d21/a
345 A d2/d21/a
346 d1/a
346 d1/a
347 A d2/d21/a1
347 A d2/d21/a1
348 d1/d11/a1
348 d1/d11/a1
349 R d1/a
349 R d1/a
350 R d1/d11/a1
350 R d1/d11/a1
351 $ hg update -C
351 $ hg update -C
352 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
352 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
353 $ rm -rf d2/d21
353 $ rm -rf d2/d21
354
354
355 move every file under d1 starting with an 'a' to d2/d21 (regexp)
355 move every file under d1 starting with an 'a' to d2/d21 (regexp)
356
356
357 $ mkdir d2/d21
357 $ mkdir d2/d21
358 $ hg rename 're:d1/([^a][^/]*/)*a.*' d2/d21
358 $ hg rename 're:d1/([^a][^/]*/)*a.*' d2/d21
359 moving d1/a to d2/d21/a
359 moving d1/a to d2/d21/a
360 moving d1/d11/a1 to d2/d21/a1
360 moving d1/d11/a1 to d2/d21/a1
361 $ hg status -C
361 $ hg status -C
362 A d2/d21/a
362 A d2/d21/a
363 d1/a
363 d1/a
364 A d2/d21/a1
364 A d2/d21/a1
365 d1/d11/a1
365 d1/d11/a1
366 R d1/a
366 R d1/a
367 R d1/d11/a1
367 R d1/d11/a1
368 $ hg update -C
368 $ hg update -C
369 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
369 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 $ rm -rf d2/d21
370 $ rm -rf d2/d21
371
371
372 attempt to overwrite an existing file
372 attempt to overwrite an existing file
373
373
374 $ echo "ca" > d1/ca
374 $ echo "ca" > d1/ca
375 $ hg rename d1/ba d1/ca
375 $ hg rename d1/ba d1/ca
376 d1/ca: not overwriting - file exists
376 d1/ca: not overwriting - file exists
377 ('hg rename --after' to record the rename)
377 ('hg rename --after' to record the rename)
378 [1]
378 [1]
379 $ hg status -C
379 $ hg status -C
380 ? d1/ca
380 ? d1/ca
381 $ hg update -C
381 $ hg update -C
382 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
382 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
383
383
384 forced overwrite of an existing file
384 forced overwrite of an existing file
385
385
386 $ echo "ca" > d1/ca
386 $ echo "ca" > d1/ca
387 $ hg rename --force d1/ba d1/ca
387 $ hg rename --force d1/ba d1/ca
388 $ hg status -C
388 $ hg status -C
389 A d1/ca
389 A d1/ca
390 d1/ba
390 d1/ba
391 R d1/ba
391 R d1/ba
392 $ hg update -C
392 $ hg update -C
393 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
393 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
394 $ rm d1/ca
394 $ rm d1/ca
395
395
396 attempt to overwrite an existing broken symlink
396 attempt to overwrite an existing broken symlink
397
397
398 #if symlink
398 #if symlink
399 $ ln -s ba d1/ca
399 $ ln -s ba d1/ca
400 $ hg rename --traceback d1/ba d1/ca
400 $ hg rename --traceback d1/ba d1/ca
401 d1/ca: not overwriting - file exists
401 d1/ca: not overwriting - file exists
402 ('hg rename --after' to record the rename)
402 ('hg rename --after' to record the rename)
403 [1]
403 [1]
404 $ hg status -C
404 $ hg status -C
405 ? d1/ca
405 ? d1/ca
406 $ hg update -C
406 $ hg update -C
407 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
407 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
408 $ rm d1/ca
408 $ rm d1/ca
409
409
410 replace a symlink with a file
410 replace a symlink with a file
411
411
412 $ ln -s ba d1/ca
412 $ ln -s ba d1/ca
413 $ hg rename --force d1/ba d1/ca
413 $ hg rename --force d1/ba d1/ca
414 $ hg status -C
414 $ hg status -C
415 A d1/ca
415 A d1/ca
416 d1/ba
416 d1/ba
417 R d1/ba
417 R d1/ba
418 $ hg update -C
418 $ hg update -C
419 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
419 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
420 $ rm d1/ca
420 $ rm d1/ca
421 #endif
421 #endif
422
422
423 do not copy more than one source file to the same destination file
423 do not copy more than one source file to the same destination file
424
424
425 $ mkdir d3
425 $ mkdir d3
426 $ hg rename d1/* d2/* d3
426 $ hg rename d1/* d2/* d3
427 moving d1/d11/a1 to d3/d11/a1
427 moving d1/d11/a1 to d3/d11/a1
428 d3/b: not overwriting - d2/b collides with d1/b
428 d3/b: not overwriting - d2/b collides with d1/b
429 [1]
429 [1]
430 $ hg status -C
430 $ hg status -C
431 A d3/a
431 A d3/a
432 d1/a
432 d1/a
433 A d3/b
433 A d3/b
434 d1/b
434 d1/b
435 A d3/ba
435 A d3/ba
436 d1/ba
436 d1/ba
437 A d3/d11/a1
437 A d3/d11/a1
438 d1/d11/a1
438 d1/d11/a1
439 R d1/a
439 R d1/a
440 R d1/b
440 R d1/b
441 R d1/ba
441 R d1/ba
442 R d1/d11/a1
442 R d1/d11/a1
443 $ hg update -C
443 $ hg update -C
444 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
444 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
445 $ rm -rf d3
445 $ rm -rf d3
446
446
447 move a whole subtree with "hg rename ."
447 move a whole subtree with "hg rename ."
448
448
449 $ mkdir d3
449 $ mkdir d3
450 $ (cd d1; hg rename . ../d3)
450 $ (cd d1; hg rename . ../d3)
451 moving a to ../d3/d1/a
451 moving a to ../d3/d1/a
452 moving b to ../d3/d1/b
452 moving b to ../d3/d1/b
453 moving ba to ../d3/d1/ba
453 moving ba to ../d3/d1/ba
454 moving d11/a1 to ../d3/d1/d11/a1
454 moving d11/a1 to ../d3/d1/d11/a1
455 $ hg status -C
455 $ hg status -C
456 A d3/d1/a
456 A d3/d1/a
457 d1/a
457 d1/a
458 A d3/d1/b
458 A d3/d1/b
459 d1/b
459 d1/b
460 A d3/d1/ba
460 A d3/d1/ba
461 d1/ba
461 d1/ba
462 A d3/d1/d11/a1
462 A d3/d1/d11/a1
463 d1/d11/a1
463 d1/d11/a1
464 R d1/a
464 R d1/a
465 R d1/b
465 R d1/b
466 R d1/ba
466 R d1/ba
467 R d1/d11/a1
467 R d1/d11/a1
468 $ hg update -C
468 $ hg update -C
469 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
469 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
470 $ rm -rf d3
470 $ rm -rf d3
471
471
472 move a whole subtree with "hg rename --after ."
472 move a whole subtree with "hg rename --after ."
473
473
474 $ mkdir d3
474 $ mkdir d3
475 $ mv d1/* d3
475 $ mv d1/* d3
476 $ (cd d1; hg rename --after . ../d3)
476 $ (cd d1; hg rename --after . ../d3)
477 moving a to ../d3/a
477 moving a to ../d3/a
478 moving b to ../d3/b
478 moving b to ../d3/b
479 moving ba to ../d3/ba
479 moving ba to ../d3/ba
480 moving d11/a1 to ../d3/d11/a1
480 moving d11/a1 to ../d3/d11/a1
481 $ hg status -C
481 $ hg status -C
482 A d3/a
482 A d3/a
483 d1/a
483 d1/a
484 A d3/b
484 A d3/b
485 d1/b
485 d1/b
486 A d3/ba
486 A d3/ba
487 d1/ba
487 d1/ba
488 A d3/d11/a1
488 A d3/d11/a1
489 d1/d11/a1
489 d1/d11/a1
490 R d1/a
490 R d1/a
491 R d1/b
491 R d1/b
492 R d1/ba
492 R d1/ba
493 R d1/d11/a1
493 R d1/d11/a1
494 $ hg update -C
494 $ hg update -C
495 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
495 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
496 $ rm -rf d3
496 $ rm -rf d3
497
497
498 move the parent tree with "hg rename .."
498 move the parent tree with "hg rename .."
499
499
500 $ (cd d1/d11; hg rename .. ../../d3)
500 $ (cd d1/d11; hg rename .. ../../d3)
501 moving ../a to ../../d3/a
501 moving ../a to ../../d3/a
502 moving ../b to ../../d3/b
502 moving ../b to ../../d3/b
503 moving ../ba to ../../d3/ba
503 moving ../ba to ../../d3/ba
504 moving a1 to ../../d3/d11/a1
504 moving a1 to ../../d3/d11/a1
505 $ hg status -C
505 $ hg status -C
506 A d3/a
506 A d3/a
507 d1/a
507 d1/a
508 A d3/b
508 A d3/b
509 d1/b
509 d1/b
510 A d3/ba
510 A d3/ba
511 d1/ba
511 d1/ba
512 A d3/d11/a1
512 A d3/d11/a1
513 d1/d11/a1
513 d1/d11/a1
514 R d1/a
514 R d1/a
515 R d1/b
515 R d1/b
516 R d1/ba
516 R d1/ba
517 R d1/d11/a1
517 R d1/d11/a1
518 $ hg update -C
518 $ hg update -C
519 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
519 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
520 $ rm -rf d3
520 $ rm -rf d3
521
521
522 skip removed files
522 skip removed files
523
523
524 $ hg remove d1/b
524 $ hg remove d1/b
525 $ hg rename d1 d3
525 $ hg rename d1 d3
526 moving d1/a to d3/a
526 moving d1/a to d3/a
527 moving d1/ba to d3/ba
527 moving d1/ba to d3/ba
528 moving d1/d11/a1 to d3/d11/a1
528 moving d1/d11/a1 to d3/d11/a1
529 $ hg status -C
529 $ hg status -C
530 A d3/a
530 A d3/a
531 d1/a
531 d1/a
532 A d3/ba
532 A d3/ba
533 d1/ba
533 d1/ba
534 A d3/d11/a1
534 A d3/d11/a1
535 d1/d11/a1
535 d1/d11/a1
536 R d1/a
536 R d1/a
537 R d1/b
537 R d1/b
538 R d1/ba
538 R d1/ba
539 R d1/d11/a1
539 R d1/d11/a1
540 $ hg update -C
540 $ hg update -C
541 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
541 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
542 $ rm -rf d3
542 $ rm -rf d3
543
543
544 transitive rename
544 transitive rename
545
545
546 $ hg rename d1/b d1/bb
546 $ hg rename d1/b d1/bb
547 $ hg rename d1/bb d1/bc
547 $ hg rename d1/bb d1/bc
548 $ hg status -C
548 $ hg status -C
549 A d1/bc
549 A d1/bc
550 d1/b
550 d1/b
551 R d1/b
551 R d1/b
552 $ hg update -C
552 $ hg update -C
553 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
553 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
554 $ rm d1/bc
554 $ rm d1/bc
555
555
556 transitive rename --after
556 transitive rename --after
557
557
558 $ hg rename d1/b d1/bb
558 $ hg rename d1/b d1/bb
559 $ mv d1/bb d1/bc
559 $ mv d1/bb d1/bc
560 $ hg rename --after d1/bb d1/bc
560 $ hg rename --after d1/bb d1/bc
561 $ hg status -C
561 $ hg status -C
562 A d1/bc
562 A d1/bc
563 d1/b
563 d1/b
564 R d1/b
564 R d1/b
565 $ hg update -C
565 $ hg update -C
566 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
566 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
567 $ rm d1/bc
567 $ rm d1/bc
568
568
569 $ echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)"
569 $ echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)"
570 # idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)
570 # idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)
571 $ hg rename d1/b d1/bb
571 $ hg rename d1/b d1/bb
572 $ echo "some stuff added to d1/bb" >> d1/bb
572 $ echo "some stuff added to d1/bb" >> d1/bb
573 $ hg rename d1/bb d1/b
573 $ hg rename d1/bb d1/b
574 $ hg status -C
574 $ hg status -C
575 M d1/b
575 M d1/b
576 $ hg update -C
576 $ hg update -C
577 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
577 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
578
578
579 overwriting with renames (issue1959)
579 overwriting with renames (issue1959)
580
580
581 $ hg rename d1/a d1/c
581 $ hg rename d1/a d1/c
582 $ hg rename d1/b d1/a
582 $ hg rename d1/b d1/a
583 $ hg status -C
583 $ hg status -C
584 M d1/a
584 M d1/a
585 d1/b
585 d1/b
586 A d1/c
586 A d1/c
587 d1/a
587 d1/a
588 R d1/b
588 R d1/b
589 $ hg diff --git
589 $ hg diff --git
590 diff --git a/d1/a b/d1/a
590 diff --git a/d1/a b/d1/a
591 --- a/d1/a
591 --- a/d1/a
592 +++ b/d1/a
592 +++ b/d1/a
593 @@ -1,1 +1,1 @@
593 @@ -1,1 +1,1 @@
594 -d1/a
594 -d1/a
595 +d1/b
595 +d1/b
596 diff --git a/d1/b b/d1/b
596 diff --git a/d1/b b/d1/b
597 deleted file mode 100644
597 deleted file mode 100644
598 --- a/d1/b
598 --- a/d1/b
599 +++ /dev/null
599 +++ /dev/null
600 @@ -1,1 +0,0 @@
600 @@ -1,1 +0,0 @@
601 -d1/b
601 -d1/b
602 diff --git a/d1/a b/d1/c
602 diff --git a/d1/a b/d1/c
603 copy from d1/a
603 copy from d1/a
604 copy to d1/c
604 copy to d1/c
605 $ hg update -C
605 $ hg update -C
606 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
606 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
607 $ rm d1/c # The file was marked as added, so 'hg update' action was 'forget'
607 $ rm d1/c # The file was marked as added, so 'hg update' action was 'forget'
608
608
609 check illegal path components
609 check illegal path components
610
610
611 $ hg rename d1/d11/a1 .hg/foo
611 $ hg rename d1/d11/a1 .hg/foo
612 abort: path contains illegal component: .hg/foo
612 abort: path contains illegal component: .hg/foo
613 [255]
613 [255]
614 $ hg status -C
614 $ hg status -C
615 $ hg rename d1/d11/a1 ../foo
615 $ hg rename d1/d11/a1 ../foo
616 abort: ../foo not under root '$TESTTMP'
616 abort: ../foo not under root '$TESTTMP'
617 [255]
617 [255]
618 $ hg status -C
618 $ hg status -C
619
619
620 $ mv d1/d11/a1 .hg/foo
620 $ mv d1/d11/a1 .hg/foo
621 $ hg rename --after d1/d11/a1 .hg/foo
621 $ hg rename --after d1/d11/a1 .hg/foo
622 abort: path contains illegal component: .hg/foo
622 abort: path contains illegal component: .hg/foo
623 [255]
623 [255]
624 $ hg status -C
624 $ hg status -C
625 ! d1/d11/a1
625 ! d1/d11/a1
626 $ hg update -C
626 $ hg update -C
627 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
627 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
628 $ rm .hg/foo
628 $ rm .hg/foo
629
629
630 $ hg rename d1/d11/a1 .hg
630 $ hg rename d1/d11/a1 .hg
631 abort: path contains illegal component: .hg/a1
631 abort: path contains illegal component: .hg/a1
632 [255]
632 [255]
633 $ hg --config extensions.largefiles= rename d1/d11/a1 .hg
633 $ hg --config extensions.largefiles= rename d1/d11/a1 .hg
634 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
634 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
635 abort: path contains illegal component: .hg/a1
635 abort: path contains illegal component: .hg/a1
636 [255]
636 [255]
637 $ hg status -C
637 $ hg status -C
638 $ hg rename d1/d11/a1 ..
638 $ hg rename d1/d11/a1 ..
639 abort: ../a1 not under root '$TESTTMP'
639 abort: ../a1 not under root '$TESTTMP'
640 [255]
640 [255]
641 $ hg --config extensions.largefiles= rename d1/d11/a1 ..
641 $ hg --config extensions.largefiles= rename d1/d11/a1 ..
642 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
642 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
643 abort: ../a1 not under root '$TESTTMP'
643 abort: ../a1 not under root '$TESTTMP'
644 [255]
644 [255]
645 $ hg status -C
645 $ hg status -C
646
646
647 $ mv d1/d11/a1 .hg
647 $ mv d1/d11/a1 .hg
648 $ hg rename --after d1/d11/a1 .hg
648 $ hg rename --after d1/d11/a1 .hg
649 abort: path contains illegal component: .hg/a1
649 abort: path contains illegal component: .hg/a1
650 [255]
650 [255]
651 $ hg status -C
651 $ hg status -C
652 ! d1/d11/a1
652 ! d1/d11/a1
653 $ hg update -C
653 $ hg update -C
654 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
654 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
655 $ rm .hg/a1
655 $ rm .hg/a1
656
656
657 $ (cd d1/d11; hg rename ../../d2/b ../../.hg/foo)
657 $ (cd d1/d11; hg rename ../../d2/b ../../.hg/foo)
658 abort: path contains illegal component: .hg/foo
658 abort: path contains illegal component: .hg/foo
659 [255]
659 [255]
660 $ hg status -C
660 $ hg status -C
661 $ (cd d1/d11; hg rename ../../d2/b ../../../foo)
661 $ (cd d1/d11; hg rename ../../d2/b ../../../foo)
662 abort: ../../../foo not under root '$TESTTMP'
662 abort: ../../../foo not under root '$TESTTMP'
663 [255]
663 [255]
664 $ hg status -C
664 $ hg status -C
665
665
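The two aborts in the block above guard different things: destinations that land inside the repository's private .hg directory, and destinations that escape the repository root (these particular aborts still exit 255 in this run). A rough stand-in for that audit, illustrative only and not Mercurial's own path checker:

  import posixpath

  def audit_destination(root, dest):
      # Reject destinations that climb out of the root or enter .hg,
      # mirroring the two abort messages in the transcript above.
      joined = posixpath.normpath(posixpath.join(root, dest))
      if not joined.startswith(root.rstrip("/") + "/"):
          raise ValueError("%s not under root '%s'" % (dest, root))
      rel = joined[len(root.rstrip("/")) + 1:]
      if rel.split("/")[0] == ".hg":
          raise ValueError("path contains illegal component: %s" % rel)

  audit_destination("/repo", "d2/c")  # accepted
  for bad in (".hg/foo", "../foo"):
      try:
          audit_destination("/repo", bad)
      except ValueError as exc:
          print(exc)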
666 check that stat information such as mtime is preserved on rename - it's unclear
666 check that stat information such as mtime is preserved on rename - it's unclear
667 whether the `touch` and `stat` commands are portable, so we mimic them using
667 whether the `touch` and `stat` commands are portable, so we mimic them using
668 python. Not all platforms support precision of even one-second granularity, so
668 python. Not all platforms support precision of even one-second granularity, so
669 we allow a rather generous fudge factor here; 1234567890 is 2009, and the
669 we allow a rather generous fudge factor here; 1234567890 is 2009, and the
670 primary thing we care about is that it's not the machine's current time;
670 primary thing we care about is that it's not the machine's current time;
671 hopefully it's really unlikely for a machine to have such a broken clock that
671 hopefully it's really unlikely for a machine to have such a broken clock that
672 this test fails. :)
672 this test fails. :)
673
673
674 $ mkdir mtime
674 $ mkdir mtime
675 Create the file (as empty), then update its mtime and atime to be 1234567890.
675 Create the file (as empty), then update its mtime and atime to be 1234567890.
676 >>> import os
676 >>> import os
677 >>> filename = "mtime/f"
677 >>> filename = "mtime/f"
678 >>> mtime = 1234567890
678 >>> mtime = 1234567890
679 >>> open(filename, "w").close()
679 >>> open(filename, "w").close()
680 >>> os.utime(filename, (mtime, mtime))
680 >>> os.utime(filename, (mtime, mtime))
681 $ hg ci -qAm 'add mtime dir'
681 $ hg ci -qAm 'add mtime dir'
682 "hg cp" does not preserve the mtime, so it should be newer than the 2009
682 "hg cp" does not preserve the mtime, so it should be newer than the 2009
683 timestamp.
683 timestamp.
684 $ hg cp -q mtime mtime_cp
684 $ hg cp -q mtime mtime_cp
685 >>> from __future__ import print_function
685 >>> from __future__ import print_function
686 >>> import os
686 >>> import os
687 >>> filename = "mtime_cp/f"
687 >>> filename = "mtime_cp/f"
688 >>> print(os.stat(filename).st_mtime < 1234567999)
688 >>> print(os.stat(filename).st_mtime < 1234567999)
689 False
689 False
690 "hg mv" preserves the mtime, so it should be ~equal to the 2009 timestamp
690 "hg mv" preserves the mtime, so it should be ~equal to the 2009 timestamp
691 (modulo some fudge factor due to not every system supporting 1s-level
691 (modulo some fudge factor due to not every system supporting 1s-level
692 precision).
692 precision).
693 $ hg mv -q mtime mtime_mv
693 $ hg mv -q mtime mtime_mv
694 >>> from __future__ import print_function
694 >>> from __future__ import print_function
695 >>> import os
695 >>> import os
696 >>> filename = "mtime_mv/f"
696 >>> filename = "mtime_mv/f"
697 >>> print(os.stat(filename).st_mtime < 1234567999)
697 >>> print(os.stat(filename).st_mtime < 1234567999)
698 True
698 True
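The closing mtime check is the one place the test drops to inline Python because touch/stat are not portable: it pins a file's timestamps to 1234567890 (2009) and then asserts that "hg cp" produces a fresh timestamp while "hg mv" keeps the old one. The same measurement can be reproduced with the standard library alone; shutil below is only a stand-in for illustration, not what the test exercises:

  import os
  import shutil

  MTIME = 1234567890  # 2009, clearly not the machine's current time

  def make_pinned_file(path):
      # Create an empty file and pin both atime and mtime, like the test does.
      open(path, "w").close()
      os.utime(path, (MTIME, MTIME))

  make_pinned_file("f")
  shutil.copy("f", "f_copy")    # copy() does not carry timestamps over
  shutil.copy2("f", "f_copy2")  # copy2() preserves them

  print(os.stat("f_copy").st_mtime < 1234567999)   # False: fresh timestamp
  print(os.stat("f_copy2").st_mtime < 1234567999)  # True: pinned timestamp kept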