py3: force bytestr conversion of "reason" in scmutil.callcatch()...
Denis Laxalde
r44774:b4c82b70 5.2.2 stable
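
This revision changes a single line of scmutil.callcatch() (line 265 below): the "reason" extracted from a URLError/SSLError is now passed through stringutil.forcebytestr() before being %-interpolated into the bytes error message, instead of being used as-is. On Python 3 that value is frequently a str (or another exception object) rather than bytes, and interpolating a non-bytes value into a b"..." format string raises TypeError. The stand-alone sketch below illustrates the failure mode; the forcebytestr() defined here is a rough approximation written only for this example, not the actual helper from mercurial/utils/stringutil.py:

    # Illustration only; this forcebytestr() is an assumed approximation, not Mercurial's.
    def forcebytestr(obj, encoding='utf-8'):
        # Accept bytes unchanged; otherwise stringify and encode.
        if isinstance(obj, bytes):
            return obj
        return str(obj).encode(encoding, 'replace')

    reason = 'Connection refused'             # URLError.reason is commonly a str on Python 3

    try:
        msg = b"abort: error: %s\n" % reason  # TypeError: %b requires a bytes-like object
    except TypeError:
        msg = b"abort: error: %s\n" % forcebytestr(reason)

    print(msg)                                # b'abort: error: Connection refused\n'

In the patched code the real conversion is done by stringutil.forcebytestr(), which this module already imports via "from .utils import stringutil".
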
@@ -1,2221 +1,2221 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirid,
26 wdirid,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .pycompat import getattr
29 from .pycompat import getattr
30
30
31 from . import (
31 from . import (
32 copies as copiesmod,
32 copies as copiesmod,
33 encoding,
33 encoding,
34 error,
34 error,
35 match as matchmod,
35 match as matchmod,
36 obsolete,
36 obsolete,
37 obsutil,
37 obsutil,
38 pathutil,
38 pathutil,
39 phases,
39 phases,
40 policy,
40 policy,
41 pycompat,
41 pycompat,
42 revsetlang,
42 revsetlang,
43 similar,
43 similar,
44 smartset,
44 smartset,
45 url,
45 url,
46 util,
46 util,
47 vfs,
47 vfs,
48 )
48 )
49
49
50 from .utils import (
50 from .utils import (
51 procutil,
51 procutil,
52 stringutil,
52 stringutil,
53 )
53 )
54
54
55 if pycompat.iswindows:
55 if pycompat.iswindows:
56 from . import scmwindows as scmplatform
56 from . import scmwindows as scmplatform
57 else:
57 else:
58 from . import scmposix as scmplatform
58 from . import scmposix as scmplatform
59
59
60 parsers = policy.importmod(r'parsers')
60 parsers = policy.importmod(r'parsers')
61
61
62 termsize = scmplatform.termsize
62 termsize = scmplatform.termsize
63
63
64
64
65 class status(tuple):
65 class status(tuple):
66 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
66 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
67 and 'ignored' properties are only relevant to the working copy.
67 and 'ignored' properties are only relevant to the working copy.
68 '''
68 '''
69
69
70 __slots__ = ()
70 __slots__ = ()
71
71
72 def __new__(
72 def __new__(
73 cls, modified, added, removed, deleted, unknown, ignored, clean
73 cls, modified, added, removed, deleted, unknown, ignored, clean
74 ):
74 ):
75 return tuple.__new__(
75 return tuple.__new__(
76 cls, (modified, added, removed, deleted, unknown, ignored, clean)
76 cls, (modified, added, removed, deleted, unknown, ignored, clean)
77 )
77 )
78
78
79 @property
79 @property
80 def modified(self):
80 def modified(self):
81 '''files that have been modified'''
81 '''files that have been modified'''
82 return self[0]
82 return self[0]
83
83
84 @property
84 @property
85 def added(self):
85 def added(self):
86 '''files that have been added'''
86 '''files that have been added'''
87 return self[1]
87 return self[1]
88
88
89 @property
89 @property
90 def removed(self):
90 def removed(self):
91 '''files that have been removed'''
91 '''files that have been removed'''
92 return self[2]
92 return self[2]
93
93
94 @property
94 @property
95 def deleted(self):
95 def deleted(self):
96 '''files that are in the dirstate, but have been deleted from the
96 '''files that are in the dirstate, but have been deleted from the
97 working copy (aka "missing")
97 working copy (aka "missing")
98 '''
98 '''
99 return self[3]
99 return self[3]
100
100
101 @property
101 @property
102 def unknown(self):
102 def unknown(self):
103 '''files not in the dirstate that are not ignored'''
103 '''files not in the dirstate that are not ignored'''
104 return self[4]
104 return self[4]
105
105
106 @property
106 @property
107 def ignored(self):
107 def ignored(self):
108 '''files not in the dirstate that are ignored (by _dirignore())'''
108 '''files not in the dirstate that are ignored (by _dirignore())'''
109 return self[5]
109 return self[5]
110
110
111 @property
111 @property
112 def clean(self):
112 def clean(self):
113 '''files that have not been modified'''
113 '''files that have not been modified'''
114 return self[6]
114 return self[6]
115
115
116 def __repr__(self, *args, **kwargs):
116 def __repr__(self, *args, **kwargs):
117 return (
117 return (
118 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
118 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
119 r'unknown=%s, ignored=%s, clean=%s>'
119 r'unknown=%s, ignored=%s, clean=%s>'
120 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
120 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
121
121
122
122
123 def itersubrepos(ctx1, ctx2):
123 def itersubrepos(ctx1, ctx2):
124 """find subrepos in ctx1 or ctx2"""
124 """find subrepos in ctx1 or ctx2"""
125 # Create a (subpath, ctx) mapping where we prefer subpaths from
125 # Create a (subpath, ctx) mapping where we prefer subpaths from
126 # ctx1. The subpaths from ctx2 are important when the .hgsub file
126 # ctx1. The subpaths from ctx2 are important when the .hgsub file
127 # has been modified (in ctx2) but not yet committed (in ctx1).
127 # has been modified (in ctx2) but not yet committed (in ctx1).
128 subpaths = dict.fromkeys(ctx2.substate, ctx2)
128 subpaths = dict.fromkeys(ctx2.substate, ctx2)
129 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
129 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
130
130
131 missing = set()
131 missing = set()
132
132
133 for subpath in ctx2.substate:
133 for subpath in ctx2.substate:
134 if subpath not in ctx1.substate:
134 if subpath not in ctx1.substate:
135 del subpaths[subpath]
135 del subpaths[subpath]
136 missing.add(subpath)
136 missing.add(subpath)
137
137
138 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
138 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
139 yield subpath, ctx.sub(subpath)
139 yield subpath, ctx.sub(subpath)
140
140
141 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
141 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
142 # status and diff will have an accurate result when it does
142 # status and diff will have an accurate result when it does
143 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
143 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
144 # against itself.
144 # against itself.
145 for subpath in missing:
145 for subpath in missing:
146 yield subpath, ctx2.nullsub(subpath, ctx1)
146 yield subpath, ctx2.nullsub(subpath, ctx1)
147
147
148
148
149 def nochangesfound(ui, repo, excluded=None):
149 def nochangesfound(ui, repo, excluded=None):
150 '''Report no changes for push/pull, excluded is None or a list of
150 '''Report no changes for push/pull, excluded is None or a list of
151 nodes excluded from the push/pull.
151 nodes excluded from the push/pull.
152 '''
152 '''
153 secretlist = []
153 secretlist = []
154 if excluded:
154 if excluded:
155 for n in excluded:
155 for n in excluded:
156 ctx = repo[n]
156 ctx = repo[n]
157 if ctx.phase() >= phases.secret and not ctx.extinct():
157 if ctx.phase() >= phases.secret and not ctx.extinct():
158 secretlist.append(n)
158 secretlist.append(n)
159
159
160 if secretlist:
160 if secretlist:
161 ui.status(
161 ui.status(
162 _(b"no changes found (ignored %d secret changesets)\n")
162 _(b"no changes found (ignored %d secret changesets)\n")
163 % len(secretlist)
163 % len(secretlist)
164 )
164 )
165 else:
165 else:
166 ui.status(_(b"no changes found\n"))
166 ui.status(_(b"no changes found\n"))
167
167
168
168
169 def callcatch(ui, func):
169 def callcatch(ui, func):
170 """call func() with global exception handling
170 """call func() with global exception handling
171
171
172 return func() if no exception happens. otherwise do some error handling
172 return func() if no exception happens. otherwise do some error handling
173 and return an exit code accordingly. does not handle all exceptions.
173 and return an exit code accordingly. does not handle all exceptions.
174 """
174 """
175 try:
175 try:
176 try:
176 try:
177 return func()
177 return func()
178 except: # re-raises
178 except: # re-raises
179 ui.traceback()
179 ui.traceback()
180 raise
180 raise
181 # Global exception handling, alphabetically
181 # Global exception handling, alphabetically
182 # Mercurial-specific first, followed by built-in and library exceptions
182 # Mercurial-specific first, followed by built-in and library exceptions
183 except error.LockHeld as inst:
183 except error.LockHeld as inst:
184 if inst.errno == errno.ETIMEDOUT:
184 if inst.errno == errno.ETIMEDOUT:
185 reason = _(b'timed out waiting for lock held by %r') % (
185 reason = _(b'timed out waiting for lock held by %r') % (
186 pycompat.bytestr(inst.locker)
186 pycompat.bytestr(inst.locker)
187 )
187 )
188 else:
188 else:
189 reason = _(b'lock held by %r') % inst.locker
189 reason = _(b'lock held by %r') % inst.locker
190 ui.error(
190 ui.error(
191 _(b"abort: %s: %s\n")
191 _(b"abort: %s: %s\n")
192 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
192 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
193 )
193 )
194 if not inst.locker:
194 if not inst.locker:
195 ui.error(_(b"(lock might be very busy)\n"))
195 ui.error(_(b"(lock might be very busy)\n"))
196 except error.LockUnavailable as inst:
196 except error.LockUnavailable as inst:
197 ui.error(
197 ui.error(
198 _(b"abort: could not lock %s: %s\n")
198 _(b"abort: could not lock %s: %s\n")
199 % (
199 % (
200 inst.desc or stringutil.forcebytestr(inst.filename),
200 inst.desc or stringutil.forcebytestr(inst.filename),
201 encoding.strtolocal(inst.strerror),
201 encoding.strtolocal(inst.strerror),
202 )
202 )
203 )
203 )
204 except error.OutOfBandError as inst:
204 except error.OutOfBandError as inst:
205 if inst.args:
205 if inst.args:
206 msg = _(b"abort: remote error:\n")
206 msg = _(b"abort: remote error:\n")
207 else:
207 else:
208 msg = _(b"abort: remote error\n")
208 msg = _(b"abort: remote error\n")
209 ui.error(msg)
209 ui.error(msg)
210 if inst.args:
210 if inst.args:
211 ui.error(b''.join(inst.args))
211 ui.error(b''.join(inst.args))
212 if inst.hint:
212 if inst.hint:
213 ui.error(b'(%s)\n' % inst.hint)
213 ui.error(b'(%s)\n' % inst.hint)
214 except error.RepoError as inst:
214 except error.RepoError as inst:
215 ui.error(_(b"abort: %s!\n") % inst)
215 ui.error(_(b"abort: %s!\n") % inst)
216 if inst.hint:
216 if inst.hint:
217 ui.error(_(b"(%s)\n") % inst.hint)
217 ui.error(_(b"(%s)\n") % inst.hint)
218 except error.ResponseError as inst:
218 except error.ResponseError as inst:
219 ui.error(_(b"abort: %s") % inst.args[0])
219 ui.error(_(b"abort: %s") % inst.args[0])
220 msg = inst.args[1]
220 msg = inst.args[1]
221 if isinstance(msg, type(u'')):
221 if isinstance(msg, type(u'')):
222 msg = pycompat.sysbytes(msg)
222 msg = pycompat.sysbytes(msg)
223 if not isinstance(msg, bytes):
223 if not isinstance(msg, bytes):
224 ui.error(b" %r\n" % (msg,))
224 ui.error(b" %r\n" % (msg,))
225 elif not msg:
225 elif not msg:
226 ui.error(_(b" empty string\n"))
226 ui.error(_(b" empty string\n"))
227 else:
227 else:
228 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
228 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
229 except error.CensoredNodeError as inst:
229 except error.CensoredNodeError as inst:
230 ui.error(_(b"abort: file censored %s!\n") % inst)
230 ui.error(_(b"abort: file censored %s!\n") % inst)
231 except error.StorageError as inst:
231 except error.StorageError as inst:
232 ui.error(_(b"abort: %s!\n") % inst)
232 ui.error(_(b"abort: %s!\n") % inst)
233 if inst.hint:
233 if inst.hint:
234 ui.error(_(b"(%s)\n") % inst.hint)
234 ui.error(_(b"(%s)\n") % inst.hint)
235 except error.InterventionRequired as inst:
235 except error.InterventionRequired as inst:
236 ui.error(b"%s\n" % inst)
236 ui.error(b"%s\n" % inst)
237 if inst.hint:
237 if inst.hint:
238 ui.error(_(b"(%s)\n") % inst.hint)
238 ui.error(_(b"(%s)\n") % inst.hint)
239 return 1
239 return 1
240 except error.WdirUnsupported:
240 except error.WdirUnsupported:
241 ui.error(_(b"abort: working directory revision cannot be specified\n"))
241 ui.error(_(b"abort: working directory revision cannot be specified\n"))
242 except error.Abort as inst:
242 except error.Abort as inst:
243 ui.error(_(b"abort: %s\n") % inst)
243 ui.error(_(b"abort: %s\n") % inst)
244 if inst.hint:
244 if inst.hint:
245 ui.error(_(b"(%s)\n") % inst.hint)
245 ui.error(_(b"(%s)\n") % inst.hint)
246 except ImportError as inst:
246 except ImportError as inst:
247 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
247 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
248 m = stringutil.forcebytestr(inst).split()[-1]
248 m = stringutil.forcebytestr(inst).split()[-1]
249 if m in b"mpatch bdiff".split():
249 if m in b"mpatch bdiff".split():
250 ui.error(_(b"(did you forget to compile extensions?)\n"))
250 ui.error(_(b"(did you forget to compile extensions?)\n"))
251 elif m in b"zlib".split():
251 elif m in b"zlib".split():
252 ui.error(_(b"(is your Python install correct?)\n"))
252 ui.error(_(b"(is your Python install correct?)\n"))
253 except (IOError, OSError) as inst:
253 except (IOError, OSError) as inst:
254 if util.safehasattr(inst, b"code"): # HTTPError
254 if util.safehasattr(inst, b"code"): # HTTPError
255 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
255 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
256 elif util.safehasattr(inst, b"reason"): # URLError or SSLError
256 elif util.safehasattr(inst, b"reason"): # URLError or SSLError
257 try: # usually it is in the form (errno, strerror)
257 try: # usually it is in the form (errno, strerror)
258 reason = inst.reason.args[1]
258 reason = inst.reason.args[1]
259 except (AttributeError, IndexError):
259 except (AttributeError, IndexError):
260 # it might be anything, for example a string
260 # it might be anything, for example a string
261 reason = inst.reason
261 reason = inst.reason
262 if isinstance(reason, pycompat.unicode):
262 if isinstance(reason, pycompat.unicode):
263 # SSLError of Python 2.7.9 contains a unicode
263 # SSLError of Python 2.7.9 contains a unicode
264 reason = encoding.unitolocal(reason)
264 reason = encoding.unitolocal(reason)
265 - ui.error(_(b"abort: error: %s\n") % reason)
265 + ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
266 elif (
266 elif (
267 util.safehasattr(inst, b"args")
267 util.safehasattr(inst, b"args")
268 and inst.args
268 and inst.args
269 and inst.args[0] == errno.EPIPE
269 and inst.args[0] == errno.EPIPE
270 ):
270 ):
271 pass
271 pass
272 elif getattr(inst, "strerror", None): # common IOError or OSError
272 elif getattr(inst, "strerror", None): # common IOError or OSError
273 if getattr(inst, "filename", None) is not None:
273 if getattr(inst, "filename", None) is not None:
274 ui.error(
274 ui.error(
275 _(b"abort: %s: '%s'\n")
275 _(b"abort: %s: '%s'\n")
276 % (
276 % (
277 encoding.strtolocal(inst.strerror),
277 encoding.strtolocal(inst.strerror),
278 stringutil.forcebytestr(inst.filename),
278 stringutil.forcebytestr(inst.filename),
279 )
279 )
280 )
280 )
281 else:
281 else:
282 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
282 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
283 else: # suspicious IOError
283 else: # suspicious IOError
284 raise
284 raise
285 except MemoryError:
285 except MemoryError:
286 ui.error(_(b"abort: out of memory\n"))
286 ui.error(_(b"abort: out of memory\n"))
287 except SystemExit as inst:
287 except SystemExit as inst:
288 # Commands shouldn't sys.exit directly, but give a return code.
288 # Commands shouldn't sys.exit directly, but give a return code.
289 # Just in case catch this and pass exit code to caller.
289 # Just in case catch this and pass exit code to caller.
290 return inst.code
290 return inst.code
291
291
292 return -1
292 return -1
293
293
294
294
295 def checknewlabel(repo, lbl, kind):
295 def checknewlabel(repo, lbl, kind):
296 # Do not use the "kind" parameter in ui output.
296 # Do not use the "kind" parameter in ui output.
297 # It makes strings difficult to translate.
297 # It makes strings difficult to translate.
298 if lbl in [b'tip', b'.', b'null']:
298 if lbl in [b'tip', b'.', b'null']:
299 raise error.Abort(_(b"the name '%s' is reserved") % lbl)
299 raise error.Abort(_(b"the name '%s' is reserved") % lbl)
300 for c in (b':', b'\0', b'\n', b'\r'):
300 for c in (b':', b'\0', b'\n', b'\r'):
301 if c in lbl:
301 if c in lbl:
302 raise error.Abort(
302 raise error.Abort(
303 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
303 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
304 )
304 )
305 try:
305 try:
306 int(lbl)
306 int(lbl)
307 raise error.Abort(_(b"cannot use an integer as a name"))
307 raise error.Abort(_(b"cannot use an integer as a name"))
308 except ValueError:
308 except ValueError:
309 pass
309 pass
310 if lbl.strip() != lbl:
310 if lbl.strip() != lbl:
311 raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
311 raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
312
312
313
313
314 def checkfilename(f):
314 def checkfilename(f):
315 '''Check that the filename f is an acceptable filename for a tracked file'''
315 '''Check that the filename f is an acceptable filename for a tracked file'''
316 if b'\r' in f or b'\n' in f:
316 if b'\r' in f or b'\n' in f:
317 raise error.Abort(
317 raise error.Abort(
318 _(b"'\\n' and '\\r' disallowed in filenames: %r")
318 _(b"'\\n' and '\\r' disallowed in filenames: %r")
319 % pycompat.bytestr(f)
319 % pycompat.bytestr(f)
320 )
320 )
321
321
322
322
323 def checkportable(ui, f):
323 def checkportable(ui, f):
324 '''Check if filename f is portable and warn or abort depending on config'''
324 '''Check if filename f is portable and warn or abort depending on config'''
325 checkfilename(f)
325 checkfilename(f)
326 abort, warn = checkportabilityalert(ui)
326 abort, warn = checkportabilityalert(ui)
327 if abort or warn:
327 if abort or warn:
328 msg = util.checkwinfilename(f)
328 msg = util.checkwinfilename(f)
329 if msg:
329 if msg:
330 msg = b"%s: %s" % (msg, procutil.shellquote(f))
330 msg = b"%s: %s" % (msg, procutil.shellquote(f))
331 if abort:
331 if abort:
332 raise error.Abort(msg)
332 raise error.Abort(msg)
333 ui.warn(_(b"warning: %s\n") % msg)
333 ui.warn(_(b"warning: %s\n") % msg)
334
334
335
335
336 def checkportabilityalert(ui):
336 def checkportabilityalert(ui):
337 '''check if the user's config requests nothing, a warning, or abort for
337 '''check if the user's config requests nothing, a warning, or abort for
338 non-portable filenames'''
338 non-portable filenames'''
339 val = ui.config(b'ui', b'portablefilenames')
339 val = ui.config(b'ui', b'portablefilenames')
340 lval = val.lower()
340 lval = val.lower()
341 bval = stringutil.parsebool(val)
341 bval = stringutil.parsebool(val)
342 abort = pycompat.iswindows or lval == b'abort'
342 abort = pycompat.iswindows or lval == b'abort'
343 warn = bval or lval == b'warn'
343 warn = bval or lval == b'warn'
344 if bval is None and not (warn or abort or lval == b'ignore'):
344 if bval is None and not (warn or abort or lval == b'ignore'):
345 raise error.ConfigError(
345 raise error.ConfigError(
346 _(b"ui.portablefilenames value is invalid ('%s')") % val
346 _(b"ui.portablefilenames value is invalid ('%s')") % val
347 )
347 )
348 return abort, warn
348 return abort, warn
349
349
350
350
351 class casecollisionauditor(object):
351 class casecollisionauditor(object):
352 def __init__(self, ui, abort, dirstate):
352 def __init__(self, ui, abort, dirstate):
353 self._ui = ui
353 self._ui = ui
354 self._abort = abort
354 self._abort = abort
355 allfiles = b'\0'.join(dirstate)
355 allfiles = b'\0'.join(dirstate)
356 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
356 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
357 self._dirstate = dirstate
357 self._dirstate = dirstate
358 # The purpose of _newfiles is so that we don't complain about
358 # The purpose of _newfiles is so that we don't complain about
359 # case collisions if someone were to call this object with the
359 # case collisions if someone were to call this object with the
360 # same filename twice.
360 # same filename twice.
361 self._newfiles = set()
361 self._newfiles = set()
362
362
363 def __call__(self, f):
363 def __call__(self, f):
364 if f in self._newfiles:
364 if f in self._newfiles:
365 return
365 return
366 fl = encoding.lower(f)
366 fl = encoding.lower(f)
367 if fl in self._loweredfiles and f not in self._dirstate:
367 if fl in self._loweredfiles and f not in self._dirstate:
368 msg = _(b'possible case-folding collision for %s') % f
368 msg = _(b'possible case-folding collision for %s') % f
369 if self._abort:
369 if self._abort:
370 raise error.Abort(msg)
370 raise error.Abort(msg)
371 self._ui.warn(_(b"warning: %s\n") % msg)
371 self._ui.warn(_(b"warning: %s\n") % msg)
372 self._loweredfiles.add(fl)
372 self._loweredfiles.add(fl)
373 self._newfiles.add(f)
373 self._newfiles.add(f)
374
374
375
375
376 def filteredhash(repo, maxrev):
376 def filteredhash(repo, maxrev):
377 """build hash of filtered revisions in the current repoview.
377 """build hash of filtered revisions in the current repoview.
378
378
379 Multiple caches perform up-to-date validation by checking that the
379 Multiple caches perform up-to-date validation by checking that the
380 tiprev and tipnode stored in the cache file match the current repository.
380 tiprev and tipnode stored in the cache file match the current repository.
381 However, this is not sufficient for validating repoviews because the set
381 However, this is not sufficient for validating repoviews because the set
382 of revisions in the view may change without the repository tiprev and
382 of revisions in the view may change without the repository tiprev and
383 tipnode changing.
383 tipnode changing.
384
384
385 This function hashes all the revs filtered from the view and returns
385 This function hashes all the revs filtered from the view and returns
386 that SHA-1 digest.
386 that SHA-1 digest.
387 """
387 """
388 cl = repo.changelog
388 cl = repo.changelog
389 if not cl.filteredrevs:
389 if not cl.filteredrevs:
390 return None
390 return None
391 key = None
391 key = None
392 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
392 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
393 if revs:
393 if revs:
394 s = hashlib.sha1()
394 s = hashlib.sha1()
395 for rev in revs:
395 for rev in revs:
396 s.update(b'%d;' % rev)
396 s.update(b'%d;' % rev)
397 key = s.digest()
397 key = s.digest()
398 return key
398 return key
399
399
400
400
401 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
401 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
402 '''yield every hg repository under path, always recursively.
402 '''yield every hg repository under path, always recursively.
403 The recurse flag will only control recursion into repo working dirs'''
403 The recurse flag will only control recursion into repo working dirs'''
404
404
405 def errhandler(err):
405 def errhandler(err):
406 if err.filename == path:
406 if err.filename == path:
407 raise err
407 raise err
408
408
409 samestat = getattr(os.path, 'samestat', None)
409 samestat = getattr(os.path, 'samestat', None)
410 if followsym and samestat is not None:
410 if followsym and samestat is not None:
411
411
412 def adddir(dirlst, dirname):
412 def adddir(dirlst, dirname):
413 dirstat = os.stat(dirname)
413 dirstat = os.stat(dirname)
414 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
414 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
415 if not match:
415 if not match:
416 dirlst.append(dirstat)
416 dirlst.append(dirstat)
417 return not match
417 return not match
418
418
419 else:
419 else:
420 followsym = False
420 followsym = False
421
421
422 if (seen_dirs is None) and followsym:
422 if (seen_dirs is None) and followsym:
423 seen_dirs = []
423 seen_dirs = []
424 adddir(seen_dirs, path)
424 adddir(seen_dirs, path)
425 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
425 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
426 dirs.sort()
426 dirs.sort()
427 if b'.hg' in dirs:
427 if b'.hg' in dirs:
428 yield root # found a repository
428 yield root # found a repository
429 qroot = os.path.join(root, b'.hg', b'patches')
429 qroot = os.path.join(root, b'.hg', b'patches')
430 if os.path.isdir(os.path.join(qroot, b'.hg')):
430 if os.path.isdir(os.path.join(qroot, b'.hg')):
431 yield qroot # we have a patch queue repo here
431 yield qroot # we have a patch queue repo here
432 if recurse:
432 if recurse:
433 # avoid recursing inside the .hg directory
433 # avoid recursing inside the .hg directory
434 dirs.remove(b'.hg')
434 dirs.remove(b'.hg')
435 else:
435 else:
436 dirs[:] = [] # don't descend further
436 dirs[:] = [] # don't descend further
437 elif followsym:
437 elif followsym:
438 newdirs = []
438 newdirs = []
439 for d in dirs:
439 for d in dirs:
440 fname = os.path.join(root, d)
440 fname = os.path.join(root, d)
441 if adddir(seen_dirs, fname):
441 if adddir(seen_dirs, fname):
442 if os.path.islink(fname):
442 if os.path.islink(fname):
443 for hgname in walkrepos(fname, True, seen_dirs):
443 for hgname in walkrepos(fname, True, seen_dirs):
444 yield hgname
444 yield hgname
445 else:
445 else:
446 newdirs.append(d)
446 newdirs.append(d)
447 dirs[:] = newdirs
447 dirs[:] = newdirs
448
448
449
449
450 def binnode(ctx):
450 def binnode(ctx):
451 """Return binary node id for a given basectx"""
451 """Return binary node id for a given basectx"""
452 node = ctx.node()
452 node = ctx.node()
453 if node is None:
453 if node is None:
454 return wdirid
454 return wdirid
455 return node
455 return node
456
456
457
457
458 def intrev(ctx):
458 def intrev(ctx):
459 """Return integer for a given basectx that can be used in comparison or
459 """Return integer for a given basectx that can be used in comparison or
460 arithmetic operation"""
460 arithmetic operation"""
461 rev = ctx.rev()
461 rev = ctx.rev()
462 if rev is None:
462 if rev is None:
463 return wdirrev
463 return wdirrev
464 return rev
464 return rev
465
465
466
466
467 def formatchangeid(ctx):
467 def formatchangeid(ctx):
468 """Format changectx as '{rev}:{node|formatnode}', which is the default
468 """Format changectx as '{rev}:{node|formatnode}', which is the default
469 template provided by logcmdutil.changesettemplater"""
469 template provided by logcmdutil.changesettemplater"""
470 repo = ctx.repo()
470 repo = ctx.repo()
471 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
471 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
472
472
473
473
474 def formatrevnode(ui, rev, node):
474 def formatrevnode(ui, rev, node):
475 """Format given revision and node depending on the current verbosity"""
475 """Format given revision and node depending on the current verbosity"""
476 if ui.debugflag:
476 if ui.debugflag:
477 hexfunc = hex
477 hexfunc = hex
478 else:
478 else:
479 hexfunc = short
479 hexfunc = short
480 return b'%d:%s' % (rev, hexfunc(node))
480 return b'%d:%s' % (rev, hexfunc(node))
481
481
482
482
483 def resolvehexnodeidprefix(repo, prefix):
483 def resolvehexnodeidprefix(repo, prefix):
484 if prefix.startswith(b'x') and repo.ui.configbool(
484 if prefix.startswith(b'x') and repo.ui.configbool(
485 b'experimental', b'revisions.prefixhexnode'
485 b'experimental', b'revisions.prefixhexnode'
486 ):
486 ):
487 prefix = prefix[1:]
487 prefix = prefix[1:]
488 try:
488 try:
489 # Uses unfiltered repo because it's faster when prefix is ambiguous/
489 # Uses unfiltered repo because it's faster when prefix is ambiguous/
490 # This matches the shortesthexnodeidprefix() function below.
490 # This matches the shortesthexnodeidprefix() function below.
491 node = repo.unfiltered().changelog._partialmatch(prefix)
491 node = repo.unfiltered().changelog._partialmatch(prefix)
492 except error.AmbiguousPrefixLookupError:
492 except error.AmbiguousPrefixLookupError:
493 revset = repo.ui.config(
493 revset = repo.ui.config(
494 b'experimental', b'revisions.disambiguatewithin'
494 b'experimental', b'revisions.disambiguatewithin'
495 )
495 )
496 if revset:
496 if revset:
497 # Clear config to avoid infinite recursion
497 # Clear config to avoid infinite recursion
498 configoverrides = {
498 configoverrides = {
499 (b'experimental', b'revisions.disambiguatewithin'): None
499 (b'experimental', b'revisions.disambiguatewithin'): None
500 }
500 }
501 with repo.ui.configoverride(configoverrides):
501 with repo.ui.configoverride(configoverrides):
502 revs = repo.anyrevs([revset], user=True)
502 revs = repo.anyrevs([revset], user=True)
503 matches = []
503 matches = []
504 for rev in revs:
504 for rev in revs:
505 node = repo.changelog.node(rev)
505 node = repo.changelog.node(rev)
506 if hex(node).startswith(prefix):
506 if hex(node).startswith(prefix):
507 matches.append(node)
507 matches.append(node)
508 if len(matches) == 1:
508 if len(matches) == 1:
509 return matches[0]
509 return matches[0]
510 raise
510 raise
511 if node is None:
511 if node is None:
512 return
512 return
513 repo.changelog.rev(node) # make sure node isn't filtered
513 repo.changelog.rev(node) # make sure node isn't filtered
514 return node
514 return node
515
515
516
516
517 def mayberevnum(repo, prefix):
517 def mayberevnum(repo, prefix):
518 """Checks if the given prefix may be mistaken for a revision number"""
518 """Checks if the given prefix may be mistaken for a revision number"""
519 try:
519 try:
520 i = int(prefix)
520 i = int(prefix)
521 # if we are a pure int, then starting with zero will not be
521 # if we are a pure int, then starting with zero will not be
522 # confused as a rev; or, obviously, if the int is larger
522 # confused as a rev; or, obviously, if the int is larger
523 # than the value of the tip rev. We still need to disambiguate if
523 # than the value of the tip rev. We still need to disambiguate if
524 # prefix == '0', since that *is* a valid revnum.
524 # prefix == '0', since that *is* a valid revnum.
525 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
525 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
526 return False
526 return False
527 return True
527 return True
528 except ValueError:
528 except ValueError:
529 return False
529 return False
530
530
531
531
532 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
532 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
533 """Find the shortest unambiguous prefix that matches hexnode.
533 """Find the shortest unambiguous prefix that matches hexnode.
534
534
535 If "cache" is not None, it must be a dictionary that can be used for
535 If "cache" is not None, it must be a dictionary that can be used for
536 caching between calls to this method.
536 caching between calls to this method.
537 """
537 """
538 # _partialmatch() of filtered changelog could take O(len(repo)) time,
538 # _partialmatch() of filtered changelog could take O(len(repo)) time,
539 # which would be unacceptably slow. so we look for hash collision in
539 # which would be unacceptably slow. so we look for hash collision in
540 # unfiltered space, which means some hashes may be slightly longer.
540 # unfiltered space, which means some hashes may be slightly longer.
541
541
542 minlength = max(minlength, 1)
542 minlength = max(minlength, 1)
543
543
544 def disambiguate(prefix):
544 def disambiguate(prefix):
545 """Disambiguate against revnums."""
545 """Disambiguate against revnums."""
546 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
546 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
547 if mayberevnum(repo, prefix):
547 if mayberevnum(repo, prefix):
548 return b'x' + prefix
548 return b'x' + prefix
549 else:
549 else:
550 return prefix
550 return prefix
551
551
552 hexnode = hex(node)
552 hexnode = hex(node)
553 for length in range(len(prefix), len(hexnode) + 1):
553 for length in range(len(prefix), len(hexnode) + 1):
554 prefix = hexnode[:length]
554 prefix = hexnode[:length]
555 if not mayberevnum(repo, prefix):
555 if not mayberevnum(repo, prefix):
556 return prefix
556 return prefix
557
557
558 cl = repo.unfiltered().changelog
558 cl = repo.unfiltered().changelog
559 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
559 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
560 if revset:
560 if revset:
561 revs = None
561 revs = None
562 if cache is not None:
562 if cache is not None:
563 revs = cache.get(b'disambiguationrevset')
563 revs = cache.get(b'disambiguationrevset')
564 if revs is None:
564 if revs is None:
565 revs = repo.anyrevs([revset], user=True)
565 revs = repo.anyrevs([revset], user=True)
566 if cache is not None:
566 if cache is not None:
567 cache[b'disambiguationrevset'] = revs
567 cache[b'disambiguationrevset'] = revs
568 if cl.rev(node) in revs:
568 if cl.rev(node) in revs:
569 hexnode = hex(node)
569 hexnode = hex(node)
570 nodetree = None
570 nodetree = None
571 if cache is not None:
571 if cache is not None:
572 nodetree = cache.get(b'disambiguationnodetree')
572 nodetree = cache.get(b'disambiguationnodetree')
573 if not nodetree:
573 if not nodetree:
574 try:
574 try:
575 nodetree = parsers.nodetree(cl.index, len(revs))
575 nodetree = parsers.nodetree(cl.index, len(revs))
576 except AttributeError:
576 except AttributeError:
577 # no native nodetree
577 # no native nodetree
578 pass
578 pass
579 else:
579 else:
580 for r in revs:
580 for r in revs:
581 nodetree.insert(r)
581 nodetree.insert(r)
582 if cache is not None:
582 if cache is not None:
583 cache[b'disambiguationnodetree'] = nodetree
583 cache[b'disambiguationnodetree'] = nodetree
584 if nodetree is not None:
584 if nodetree is not None:
585 length = max(nodetree.shortest(node), minlength)
585 length = max(nodetree.shortest(node), minlength)
586 prefix = hexnode[:length]
586 prefix = hexnode[:length]
587 return disambiguate(prefix)
587 return disambiguate(prefix)
588 for length in range(minlength, len(hexnode) + 1):
588 for length in range(minlength, len(hexnode) + 1):
589 matches = []
589 matches = []
590 prefix = hexnode[:length]
590 prefix = hexnode[:length]
591 for rev in revs:
591 for rev in revs:
592 otherhexnode = repo[rev].hex()
592 otherhexnode = repo[rev].hex()
593 if prefix == otherhexnode[:length]:
593 if prefix == otherhexnode[:length]:
594 matches.append(otherhexnode)
594 matches.append(otherhexnode)
595 if len(matches) == 1:
595 if len(matches) == 1:
596 return disambiguate(prefix)
596 return disambiguate(prefix)
597
597
598 try:
598 try:
599 return disambiguate(cl.shortest(node, minlength))
599 return disambiguate(cl.shortest(node, minlength))
600 except error.LookupError:
600 except error.LookupError:
601 raise error.RepoLookupError()
601 raise error.RepoLookupError()
602
602
603
603
604 def isrevsymbol(repo, symbol):
604 def isrevsymbol(repo, symbol):
605 """Checks if a symbol exists in the repo.
605 """Checks if a symbol exists in the repo.
606
606
607 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
607 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
608 symbol is an ambiguous nodeid prefix.
608 symbol is an ambiguous nodeid prefix.
609 """
609 """
610 try:
610 try:
611 revsymbol(repo, symbol)
611 revsymbol(repo, symbol)
612 return True
612 return True
613 except error.RepoLookupError:
613 except error.RepoLookupError:
614 return False
614 return False
615
615
616
616
617 def revsymbol(repo, symbol):
617 def revsymbol(repo, symbol):
618 """Returns a context given a single revision symbol (as string).
618 """Returns a context given a single revision symbol (as string).
619
619
620 This is similar to revsingle(), but accepts only a single revision symbol,
620 This is similar to revsingle(), but accepts only a single revision symbol,
621 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
621 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
622 not "max(public())".
622 not "max(public())".
623 """
623 """
624 if not isinstance(symbol, bytes):
624 if not isinstance(symbol, bytes):
625 msg = (
625 msg = (
626 b"symbol (%s of type %s) was not a string, did you mean "
626 b"symbol (%s of type %s) was not a string, did you mean "
627 b"repo[symbol]?" % (symbol, type(symbol))
627 b"repo[symbol]?" % (symbol, type(symbol))
628 )
628 )
629 raise error.ProgrammingError(msg)
629 raise error.ProgrammingError(msg)
630 try:
630 try:
631 if symbol in (b'.', b'tip', b'null'):
631 if symbol in (b'.', b'tip', b'null'):
632 return repo[symbol]
632 return repo[symbol]
633
633
634 try:
634 try:
635 r = int(symbol)
635 r = int(symbol)
636 if b'%d' % r != symbol:
636 if b'%d' % r != symbol:
637 raise ValueError
637 raise ValueError
638 l = len(repo.changelog)
638 l = len(repo.changelog)
639 if r < 0:
639 if r < 0:
640 r += l
640 r += l
641 if r < 0 or r >= l and r != wdirrev:
641 if r < 0 or r >= l and r != wdirrev:
642 raise ValueError
642 raise ValueError
643 return repo[r]
643 return repo[r]
644 except error.FilteredIndexError:
644 except error.FilteredIndexError:
645 raise
645 raise
646 except (ValueError, OverflowError, IndexError):
646 except (ValueError, OverflowError, IndexError):
647 pass
647 pass
648
648
649 if len(symbol) == 40:
649 if len(symbol) == 40:
650 try:
650 try:
651 node = bin(symbol)
651 node = bin(symbol)
652 rev = repo.changelog.rev(node)
652 rev = repo.changelog.rev(node)
653 return repo[rev]
653 return repo[rev]
654 except error.FilteredLookupError:
654 except error.FilteredLookupError:
655 raise
655 raise
656 except (TypeError, LookupError):
656 except (TypeError, LookupError):
657 pass
657 pass
658
658
659 # look up bookmarks through the name interface
659 # look up bookmarks through the name interface
660 try:
660 try:
661 node = repo.names.singlenode(repo, symbol)
661 node = repo.names.singlenode(repo, symbol)
662 rev = repo.changelog.rev(node)
662 rev = repo.changelog.rev(node)
663 return repo[rev]
663 return repo[rev]
664 except KeyError:
664 except KeyError:
665 pass
665 pass
666
666
667 node = resolvehexnodeidprefix(repo, symbol)
667 node = resolvehexnodeidprefix(repo, symbol)
668 if node is not None:
668 if node is not None:
669 rev = repo.changelog.rev(node)
669 rev = repo.changelog.rev(node)
670 return repo[rev]
670 return repo[rev]
671
671
672 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
672 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
673
673
674 except error.WdirUnsupported:
674 except error.WdirUnsupported:
675 return repo[None]
675 return repo[None]
676 except (
676 except (
677 error.FilteredIndexError,
677 error.FilteredIndexError,
678 error.FilteredLookupError,
678 error.FilteredLookupError,
679 error.FilteredRepoLookupError,
679 error.FilteredRepoLookupError,
680 ):
680 ):
681 raise _filterederror(repo, symbol)
681 raise _filterederror(repo, symbol)
682
682
683
683
684 def _filterederror(repo, changeid):
684 def _filterederror(repo, changeid):
685 """build an exception to be raised about a filtered changeid
685 """build an exception to be raised about a filtered changeid
686
686
687 This is extracted in a function to help extensions (eg: evolve) to
687 This is extracted in a function to help extensions (eg: evolve) to
688 experiment with various message variants."""
688 experiment with various message variants."""
689 if repo.filtername.startswith(b'visible'):
689 if repo.filtername.startswith(b'visible'):
690
690
691 # Check if the changeset is obsolete
691 # Check if the changeset is obsolete
692 unfilteredrepo = repo.unfiltered()
692 unfilteredrepo = repo.unfiltered()
693 ctx = revsymbol(unfilteredrepo, changeid)
693 ctx = revsymbol(unfilteredrepo, changeid)
694
694
695 # If the changeset is obsolete, enrich the message with the reason
695 # If the changeset is obsolete, enrich the message with the reason
696 # that made this changeset not visible
696 # that made this changeset not visible
697 if ctx.obsolete():
697 if ctx.obsolete():
698 msg = obsutil._getfilteredreason(repo, changeid, ctx)
698 msg = obsutil._getfilteredreason(repo, changeid, ctx)
699 else:
699 else:
700 msg = _(b"hidden revision '%s'") % changeid
700 msg = _(b"hidden revision '%s'") % changeid
701
701
702 hint = _(b'use --hidden to access hidden revisions')
702 hint = _(b'use --hidden to access hidden revisions')
703
703
704 return error.FilteredRepoLookupError(msg, hint=hint)
704 return error.FilteredRepoLookupError(msg, hint=hint)
705 msg = _(b"filtered revision '%s' (not in '%s' subset)")
705 msg = _(b"filtered revision '%s' (not in '%s' subset)")
706 msg %= (changeid, repo.filtername)
706 msg %= (changeid, repo.filtername)
707 return error.FilteredRepoLookupError(msg)
707 return error.FilteredRepoLookupError(msg)
708
708
709
709
710 def revsingle(repo, revspec, default=b'.', localalias=None):
710 def revsingle(repo, revspec, default=b'.', localalias=None):
711 if not revspec and revspec != 0:
711 if not revspec and revspec != 0:
712 return repo[default]
712 return repo[default]
713
713
714 l = revrange(repo, [revspec], localalias=localalias)
714 l = revrange(repo, [revspec], localalias=localalias)
715 if not l:
715 if not l:
716 raise error.Abort(_(b'empty revision set'))
716 raise error.Abort(_(b'empty revision set'))
717 return repo[l.last()]
717 return repo[l.last()]
718
718
719
719
720 def _pairspec(revspec):
720 def _pairspec(revspec):
721 tree = revsetlang.parse(revspec)
721 tree = revsetlang.parse(revspec)
722 return tree and tree[0] in (
722 return tree and tree[0] in (
723 b'range',
723 b'range',
724 b'rangepre',
724 b'rangepre',
725 b'rangepost',
725 b'rangepost',
726 b'rangeall',
726 b'rangeall',
727 )
727 )
728
728
729
729
730 def revpair(repo, revs):
730 def revpair(repo, revs):
731 if not revs:
731 if not revs:
732 return repo[b'.'], repo[None]
732 return repo[b'.'], repo[None]
733
733
734 l = revrange(repo, revs)
734 l = revrange(repo, revs)
735
735
736 if not l:
736 if not l:
737 raise error.Abort(_(b'empty revision range'))
737 raise error.Abort(_(b'empty revision range'))
738
738
739 first = l.first()
739 first = l.first()
740 second = l.last()
740 second = l.last()
741
741
742 if (
742 if (
743 first == second
743 first == second
744 and len(revs) >= 2
744 and len(revs) >= 2
745 and not all(revrange(repo, [r]) for r in revs)
745 and not all(revrange(repo, [r]) for r in revs)
746 ):
746 ):
747 raise error.Abort(_(b'empty revision on one side of range'))
747 raise error.Abort(_(b'empty revision on one side of range'))
748
748
749 # if top-level is range expression, the result must always be a pair
749 # if top-level is range expression, the result must always be a pair
750 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
750 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
751 return repo[first], repo[None]
751 return repo[first], repo[None]
752
752
753 return repo[first], repo[second]
753 return repo[first], repo[second]
754
754
755
755
756 def revrange(repo, specs, localalias=None):
756 def revrange(repo, specs, localalias=None):
757 """Execute 1 to many revsets and return the union.
757 """Execute 1 to many revsets and return the union.
758
758
759 This is the preferred mechanism for executing revsets using user-specified
759 This is the preferred mechanism for executing revsets using user-specified
760 config options, such as revset aliases.
760 config options, such as revset aliases.
761
761
762 The revsets specified by ``specs`` will be executed via a chained ``OR``
762 The revsets specified by ``specs`` will be executed via a chained ``OR``
763 expression. If ``specs`` is empty, an empty result is returned.
763 expression. If ``specs`` is empty, an empty result is returned.
764
764
765 ``specs`` can contain integers, in which case they are assumed to be
765 ``specs`` can contain integers, in which case they are assumed to be
766 revision numbers.
766 revision numbers.
767
767
768 It is assumed the revsets are already formatted. If you have arguments
768 It is assumed the revsets are already formatted. If you have arguments
769 that need to be expanded in the revset, call ``revsetlang.formatspec()``
769 that need to be expanded in the revset, call ``revsetlang.formatspec()``
770 and pass the result as an element of ``specs``.
770 and pass the result as an element of ``specs``.
771
771
772 Specifying a single revset is allowed.
772 Specifying a single revset is allowed.
773
773
774 Returns a ``revset.abstractsmartset`` which is a list-like interface over
774 Returns a ``revset.abstractsmartset`` which is a list-like interface over
775 integer revisions.
775 integer revisions.
776 """
776 """
777 allspecs = []
777 allspecs = []
778 for spec in specs:
778 for spec in specs:
779 if isinstance(spec, int):
779 if isinstance(spec, int):
780 spec = revsetlang.formatspec(b'%d', spec)
780 spec = revsetlang.formatspec(b'%d', spec)
781 allspecs.append(spec)
781 allspecs.append(spec)
782 return repo.anyrevs(allspecs, user=True, localalias=localalias)
782 return repo.anyrevs(allspecs, user=True, localalias=localalias)
783
783
784
784
785 def meaningfulparents(repo, ctx):
785 def meaningfulparents(repo, ctx):
786 """Return list of meaningful (or all if debug) parentrevs for rev.
786 """Return list of meaningful (or all if debug) parentrevs for rev.
787
787
788 For merges (two non-nullrev revisions) both parents are meaningful.
788 For merges (two non-nullrev revisions) both parents are meaningful.
789 Otherwise the first parent revision is considered meaningful if it
789 Otherwise the first parent revision is considered meaningful if it
790 is not the preceding revision.
790 is not the preceding revision.
791 """
791 """
792 parents = ctx.parents()
792 parents = ctx.parents()
793 if len(parents) > 1:
793 if len(parents) > 1:
794 return parents
794 return parents
795 if repo.ui.debugflag:
795 if repo.ui.debugflag:
796 return [parents[0], repo[nullrev]]
796 return [parents[0], repo[nullrev]]
797 if parents[0].rev() >= intrev(ctx) - 1:
797 if parents[0].rev() >= intrev(ctx) - 1:
798 return []
798 return []
799 return parents
799 return parents
800
800
801
801
802 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
802 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
803 """Return a function that produces paths for presenting to the user.
803 """Return a function that produces paths for presenting to the user.
804
804
805 The returned function takes a repo-relative path and produces a path
805 The returned function takes a repo-relative path and produces a path
806 that can be presented in the UI.
806 that can be presented in the UI.
807
807
808 Depending on the value of ui.relative-paths, either a repo-relative or
808 Depending on the value of ui.relative-paths, either a repo-relative or
809 cwd-relative path will be produced.
809 cwd-relative path will be produced.
810
810
811 legacyrelativevalue is the value to use if ui.relative-paths=legacy
811 legacyrelativevalue is the value to use if ui.relative-paths=legacy
812
812
813 If forcerelativevalue is not None, then that value will be used regardless
813 If forcerelativevalue is not None, then that value will be used regardless
814 of what ui.relative-paths is set to.
814 of what ui.relative-paths is set to.
815 """
815 """
816 if forcerelativevalue is not None:
816 if forcerelativevalue is not None:
817 relative = forcerelativevalue
817 relative = forcerelativevalue
818 else:
818 else:
819 config = repo.ui.config(b'ui', b'relative-paths')
819 config = repo.ui.config(b'ui', b'relative-paths')
820 if config == b'legacy':
820 if config == b'legacy':
821 relative = legacyrelativevalue
821 relative = legacyrelativevalue
822 else:
822 else:
823 relative = stringutil.parsebool(config)
823 relative = stringutil.parsebool(config)
824 if relative is None:
824 if relative is None:
825 raise error.ConfigError(
825 raise error.ConfigError(
826 _(b"ui.relative-paths is not a boolean ('%s')") % config
826 _(b"ui.relative-paths is not a boolean ('%s')") % config
827 )
827 )
828
828
829 if relative:
829 if relative:
830 cwd = repo.getcwd()
830 cwd = repo.getcwd()
831 pathto = repo.pathto
831 pathto = repo.pathto
832 return lambda f: pathto(f, cwd)
832 return lambda f: pathto(f, cwd)
833 elif repo.ui.configbool(b'ui', b'slash'):
833 elif repo.ui.configbool(b'ui', b'slash'):
834 return lambda f: f
834 return lambda f: f
835 else:
835 else:
836 return util.localpath
836 return util.localpath
837
837
838
838
839 def subdiruipathfn(subpath, uipathfn):
839 def subdiruipathfn(subpath, uipathfn):
840 '''Create a new uipathfn that treats the file as relative to subpath.'''
840 '''Create a new uipathfn that treats the file as relative to subpath.'''
841 return lambda f: uipathfn(posixpath.join(subpath, f))
841 return lambda f: uipathfn(posixpath.join(subpath, f))
842
842
843
843
844 def anypats(pats, opts):
844 def anypats(pats, opts):
845 '''Checks if any patterns, including --include and --exclude were given.
845 '''Checks if any patterns, including --include and --exclude were given.
846
846
847 Some commands (e.g. addremove) use this condition for deciding whether to
847 Some commands (e.g. addremove) use this condition for deciding whether to
848 print absolute or relative paths.
848 print absolute or relative paths.
849 '''
849 '''
850 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
850 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
851
851
852
852
853 def expandpats(pats):
853 def expandpats(pats):
854 '''Expand bare globs when running on windows.
854 '''Expand bare globs when running on windows.
855 On posix we assume it has already been done by sh.'''
855 On posix we assume it has already been done by sh.'''
856 if not util.expandglobs:
856 if not util.expandglobs:
857 return list(pats)
857 return list(pats)
858 ret = []
858 ret = []
859 for kindpat in pats:
859 for kindpat in pats:
860 kind, pat = matchmod._patsplit(kindpat, None)
860 kind, pat = matchmod._patsplit(kindpat, None)
861 if kind is None:
861 if kind is None:
862 try:
862 try:
863 globbed = glob.glob(pat)
863 globbed = glob.glob(pat)
864 except re.error:
864 except re.error:
865 globbed = [pat]
865 globbed = [pat]
866 if globbed:
866 if globbed:
867 ret.extend(globbed)
867 ret.extend(globbed)
868 continue
868 continue
869 ret.append(kindpat)
869 ret.append(kindpat)
870 return ret
870 return ret
871
871
872
872
873 def matchandpats(
873 def matchandpats(
874 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
874 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
875 ):
875 ):
876 '''Return a matcher and the patterns that were used.
876 '''Return a matcher and the patterns that were used.
877 The matcher will warn about bad matches, unless an alternate badfn callback
877 The matcher will warn about bad matches, unless an alternate badfn callback
878 is provided.'''
878 is provided.'''
879 if opts is None:
879 if opts is None:
880 opts = {}
880 opts = {}
881 if not globbed and default == b'relpath':
881 if not globbed and default == b'relpath':
882 pats = expandpats(pats or [])
882 pats = expandpats(pats or [])
883
883
884 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
884 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
885
885
886 def bad(f, msg):
886 def bad(f, msg):
887 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
887 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
888
888
889 if badfn is None:
889 if badfn is None:
890 badfn = bad
890 badfn = bad
891
891
892 m = ctx.match(
892 m = ctx.match(
893 pats,
893 pats,
894 opts.get(b'include'),
894 opts.get(b'include'),
895 opts.get(b'exclude'),
895 opts.get(b'exclude'),
896 default,
896 default,
897 listsubrepos=opts.get(b'subrepos'),
897 listsubrepos=opts.get(b'subrepos'),
898 badfn=badfn,
898 badfn=badfn,
899 )
899 )
900
900
901 if m.always():
901 if m.always():
902 pats = []
902 pats = []
903 return m, pats
903 return m, pats
904
904
905
905
906 def match(
906 def match(
907 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
907 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
908 ):
908 ):
909 '''Return a matcher that will warn about bad matches.'''
909 '''Return a matcher that will warn about bad matches.'''
910 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
910 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
911
911
912
912
913 def matchall(repo):
913 def matchall(repo):
914 '''Return a matcher that will efficiently match everything.'''
914 '''Return a matcher that will efficiently match everything.'''
915 return matchmod.always()
915 return matchmod.always()
916
916
917
917
918 def matchfiles(repo, files, badfn=None):
918 def matchfiles(repo, files, badfn=None):
919 '''Return a matcher that will efficiently match exactly these files.'''
919 '''Return a matcher that will efficiently match exactly these files.'''
920 return matchmod.exact(files, badfn=badfn)
920 return matchmod.exact(files, badfn=badfn)
921
921
922
922
923 def parsefollowlinespattern(repo, rev, pat, msg):
923 def parsefollowlinespattern(repo, rev, pat, msg):
924 """Return a file name from `pat` pattern suitable for usage in followlines
924 """Return a file name from `pat` pattern suitable for usage in followlines
925 logic.
925 logic.
926 """
926 """
927 if not matchmod.patkind(pat):
927 if not matchmod.patkind(pat):
928 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
928 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
929 else:
929 else:
930 ctx = repo[rev]
930 ctx = repo[rev]
931 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
931 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
932 files = [f for f in ctx if m(f)]
932 files = [f for f in ctx if m(f)]
933 if len(files) != 1:
933 if len(files) != 1:
934 raise error.ParseError(msg)
934 raise error.ParseError(msg)
935 return files[0]
935 return files[0]
936
936
937
937
938 def getorigvfs(ui, repo):
938 def getorigvfs(ui, repo):
939 """return a vfs suitable to save 'orig' file
939 """return a vfs suitable to save 'orig' file
940
940
941 return None if no special directory is configured"""
941 return None if no special directory is configured"""
942 origbackuppath = ui.config(b'ui', b'origbackuppath')
942 origbackuppath = ui.config(b'ui', b'origbackuppath')
943 if not origbackuppath:
943 if not origbackuppath:
944 return None
944 return None
945 return vfs.vfs(repo.wvfs.join(origbackuppath))
945 return vfs.vfs(repo.wvfs.join(origbackuppath))
946
946
947
947
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)


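# Illustrative sketch: how a caller might resolve the backup location for a
# working-copy file before writing a .orig file. The helper name and the
# 'data' payload are hypothetical; 'ui' and 'repo' are assumed to come from
# an existing command context.
def _examplewriteorigbackup(ui, repo, filepath, data):
    # With [ui] origbackuppath unset this returns <repo>/<filepath>.orig,
    # otherwise a path inside the configured backup directory.
    backup = backuppath(ui, repo, filepath)
    with open(backup, 'wb') as fp:
        fp.write(data)
    return backup

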
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )


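# Illustrative sketch (hypothetical caller): an extension that rewrote one
# changeset would typically report the rewrite like this, letting
# cleanupnodes() create obsmarkers (or strip) and move any bookmarks.
# 'repo', 'oldnode' and 'newnode' are assumed to exist already.
def _examplereplaceone(repo, oldnode, newnode):
    replacements = {oldnode: [newnode]}  # old node -> list of successors
    cleanupnodes(repo, replacements, operation=b'myrewrite', fixphase=True)

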
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret


def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten


def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn


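# Illustrative sketch (hypothetical helper): how a log-like consumer might
# collect copy information per changeset using the factory above. 'repo' and
# 'revs' are assumed to be supplied by a caller.
def _examplecollectcopies(repo, revs):
    copiesfn = getcopiesfn(repo)
    # maps each rev to a sorted list of (destination, source) pairs
    return {rev: copiesfn(repo[rev]) for rev in revs}

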
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    ds.setparents(newctx.node(), nullid)
    copies = dict(ds.copies())
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = dict(
        (dst, oldcopies.get(src, src))
        for dst, src in pycompat.iteritems(oldcopies)
    )
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)


def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)


class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used, since it has been set in
    the instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x


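# Illustrative sketch: one way an object could hook into the filecache
# descriptor above. The subclass and consumer below (_examplerepofilecache,
# _examplerepo) are invented for the example; in practice the repository
# classes provide their own filecache subclasses.
class _examplerepofilecache(filecache):
    """filecache variant that joins cache names against an object's vfs"""

    def join(self, obj, fname):
        return obj.vfs.join(fname)


class _examplerepo(object):
    def __init__(self, vfs_):
        self.vfs = vfs_
        self._filecache = {}  # required by the filecache descriptor

    @_examplerepofilecache(b'bookmarks')
    def bookmarks(self):
        # recomputed only when the tracked file changes on disk
        return self.vfs.tryread(b'bookmarks')

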
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data


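# Illustrative sketch (hypothetical configuration and consumer): with an hgrc
# section such as
#
#   [extdata]
#   ticket = shell:cat .hg/ticketmap
#
# where each output line looks like "<revspec> <free text>", a template
# keyword or revset predicate could look values up per revision like this.
# The source name 'ticket' and the helper are invented for the example.
def _exampleticketfor(repo, rev):
    ticketmap = extdatasource(repo, b'ticket')  # {rev: value}
    return ticketmap.get(rev, b'')

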
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            b'lock can only be inherited while held'
        )
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)


def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(
        repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
    )


class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        # initialize the suffix so the format strings below never reference
        # an unbound name when no unit was given
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))


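# Illustrative sketch (hypothetical caller): commands normally obtain one of
# these via ui.makeprogress(topic, unit, total), which wires in the update
# bar; used as a context manager, the bar is completed automatically on exit.
def _examplecountrevs(ui, repo):
    prog = ui.makeprogress(
        _(b'counting'), unit=_(b'revisions'), total=len(repo)
    )
    with prog:
        for rev in repo:
            prog.increment()

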
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')


1817 class simplekeyvaluefile(object):
1817 class simplekeyvaluefile(object):
1818 """A simple file with key=value lines
1818 """A simple file with key=value lines
1819
1819
1820 Keys must be alphanumerics and start with a letter, values must not
1820 Keys must be alphanumerics and start with a letter, values must not
1821 contain '\n' characters"""
1821 contain '\n' characters"""
1822
1822
1823 firstlinekey = b'__firstline'
1823 firstlinekey = b'__firstline'
1824
1824
1825 def __init__(self, vfs, path, keys=None):
1825 def __init__(self, vfs, path, keys=None):
1826 self.vfs = vfs
1826 self.vfs = vfs
1827 self.path = path
1827 self.path = path
1828
1828
1829 def read(self, firstlinenonkeyval=False):
1829 def read(self, firstlinenonkeyval=False):
1830 """Read the contents of a simple key-value file
1830 """Read the contents of a simple key-value file
1831
1831
1832 'firstlinenonkeyval' indicates whether the first line of file should
1832 'firstlinenonkeyval' indicates whether the first line of file should
1833 be treated as a key-value pair or reuturned fully under the
1833 be treated as a key-value pair or reuturned fully under the
1834 __firstline key."""
1834 __firstline key."""
1835 lines = self.vfs.readlines(self.path)
1835 lines = self.vfs.readlines(self.path)
1836 d = {}
1836 d = {}
1837 if firstlinenonkeyval:
1837 if firstlinenonkeyval:
1838 if not lines:
1838 if not lines:
1839 e = _(b"empty simplekeyvalue file")
1839 e = _(b"empty simplekeyvalue file")
1840 raise error.CorruptedState(e)
1840 raise error.CorruptedState(e)
1841 # we don't want to include '\n' in the __firstline
1841 # we don't want to include '\n' in the __firstline
1842 d[self.firstlinekey] = lines[0][:-1]
1842 d[self.firstlinekey] = lines[0][:-1]
1843 del lines[0]
1843 del lines[0]
1844
1844
1845 try:
1845 try:
1846 # the 'if line.strip()' part prevents us from failing on empty
1846 # the 'if line.strip()' part prevents us from failing on empty
1847 # lines which only contain '\n' therefore are not skipped
1847 # lines which only contain '\n' therefore are not skipped
1848 # by 'if line'
1848 # by 'if line'
1849 updatedict = dict(
1849 updatedict = dict(
1850 line[:-1].split(b'=', 1) for line in lines if line.strip()
1850 line[:-1].split(b'=', 1) for line in lines if line.strip()
1851 )
1851 )
1852 if self.firstlinekey in updatedict:
1852 if self.firstlinekey in updatedict:
1853 e = _(b"%r can't be used as a key")
1853 e = _(b"%r can't be used as a key")
1854 raise error.CorruptedState(e % self.firstlinekey)
1854 raise error.CorruptedState(e % self.firstlinekey)
1855 d.update(updatedict)
1855 d.update(updatedict)
1856 except ValueError as e:
1856 except ValueError as e:
1857 raise error.CorruptedState(stringutil.forcebytestr(e))
1857 raise error.CorruptedState(stringutil.forcebytestr(e))
1858 return d
1858 return d
1859
1859
1860 def write(self, data, firstline=None):
1860 def write(self, data, firstline=None):
1861 """Write key=>value mapping to a file
1861 """Write key=>value mapping to a file
1862 data is a dict. Keys must be alphanumerical and start with a letter.
1862 data is a dict. Keys must be alphanumerical and start with a letter.
1863 Values must not contain newline characters.
1863 Values must not contain newline characters.
1864
1864
1865 If 'firstline' is not None, it is written to file before
1865 If 'firstline' is not None, it is written to file before
1866 everything else, as it is, not in a key=value form"""
1866 everything else, as it is, not in a key=value form"""
1867 lines = []
1867 lines = []
1868 if firstline is not None:
1868 if firstline is not None:
1869 lines.append(b'%s\n' % firstline)
1869 lines.append(b'%s\n' % firstline)
1870
1870
1871 for k, v in data.items():
1871 for k, v in data.items():
1872 if k == self.firstlinekey:
1872 if k == self.firstlinekey:
1873 e = b"key name '%s' is reserved" % self.firstlinekey
1873 e = b"key name '%s' is reserved" % self.firstlinekey
1874 raise error.ProgrammingError(e)
1874 raise error.ProgrammingError(e)
1875 if not k[0:1].isalpha():
1875 if not k[0:1].isalpha():
1876 e = b"keys must start with a letter in a key-value file"
1876 e = b"keys must start with a letter in a key-value file"
1877 raise error.ProgrammingError(e)
1877 raise error.ProgrammingError(e)
1878 if not k.isalnum():
1878 if not k.isalnum():
1879 e = b"invalid key name in a simple key-value file"
1879 e = b"invalid key name in a simple key-value file"
1880 raise error.ProgrammingError(e)
1880 raise error.ProgrammingError(e)
1881 if b'\n' in v:
1881 if b'\n' in v:
1882 e = b"invalid value in a simple key-value file"
1882 e = b"invalid value in a simple key-value file"
1883 raise error.ProgrammingError(e)
1883 raise error.ProgrammingError(e)
1884 lines.append(b"%s=%s\n" % (k, v))
1884 lines.append(b"%s=%s\n" % (k, v))
1885 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1885 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1886 fp.write(b''.join(lines))
1886 fp.write(b''.join(lines))
1887
1887
1888
1888
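# A minimal usage sketch of the simplekeyvaluefile class above (an editorial
# illustration, not part of scmutil). It assumes 'somevfs' is a writable vfs,
# e.g. vfs.vfs(b'/tmp/demo'), and the b'example-state' file name is made up.
def _simplekeyvaluefileexample(somevfs):
    kvfile = simplekeyvaluefile(somevfs, b'example-state')
    # keys must be alphanumeric and start with a letter; values must not
    # contain newlines
    kvfile.write({b'version': b'1', b'user': b'alice'}, firstline=b'v1')
    # with firstlinenonkeyval=True, the first line comes back under the
    # reserved b'__firstline' key
    return kvfile.read(firstlinenonkeyval=True)
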
1889 _reportobsoletedsource = [
1889 _reportobsoletedsource = [
1890 b'debugobsolete',
1890 b'debugobsolete',
1891 b'pull',
1891 b'pull',
1892 b'push',
1892 b'push',
1893 b'serve',
1893 b'serve',
1894 b'unbundle',
1894 b'unbundle',
1895 ]
1895 ]
1896
1896
1897 _reportnewcssource = [
1897 _reportnewcssource = [
1898 b'pull',
1898 b'pull',
1899 b'unbundle',
1899 b'unbundle',
1900 ]
1900 ]
1901
1901
1902
1902
1903 def prefetchfiles(repo, revs, match):
1903 def prefetchfiles(repo, revs, match):
1904 """Invokes the registered file prefetch functions, allowing extensions to
1904 """Invokes the registered file prefetch functions, allowing extensions to
1905 ensure the corresponding files are available locally, before the command
1905 ensure the corresponding files are available locally, before the command
1906 uses them."""
1906 uses them."""
1907 if match:
1907 if match:
1908 # The command itself will complain about files that don't exist, so
1908 # The command itself will complain about files that don't exist, so
1909 # don't duplicate the message.
1909 # don't duplicate the message.
1910 match = matchmod.badmatch(match, lambda fn, msg: None)
1910 match = matchmod.badmatch(match, lambda fn, msg: None)
1911 else:
1911 else:
1912 match = matchall(repo)
1912 match = matchall(repo)
1913
1913
1914 fileprefetchhooks(repo, revs, match)
1914 fileprefetchhooks(repo, revs, match)
1915
1915
1916
1916
1917 # a list of (repo, revs, match) prefetch functions
1917 # a list of (repo, revs, match) prefetch functions
1918 fileprefetchhooks = util.hooks()
1918 fileprefetchhooks = util.hooks()
1919
1919
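# An illustrative sketch (not part of scmutil) of how the prefetch machinery
# above is used from both sides; the b'myext' name and the hook body are
# hypothetical:
#
#   def _myextprefetch(repo, revs, match):
#       # download the files selected by 'match' at 'revs' from the
#       # extension's remote store before the command needs them
#       pass
#
#   fileprefetchhooks.add(b'myext', _myextprefetch)
#
# and a command about to read file contents would call, for example,
#
#   prefetchfiles(repo, revs, matchall(repo))
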
1920 # A marker that tells the evolve extension to suppress its own reporting
1920 # A marker that tells the evolve extension to suppress its own reporting
1921 _reportstroubledchangesets = True
1921 _reportstroubledchangesets = True
1922
1922
1923
1923
1924 def registersummarycallback(repo, otr, txnname=b''):
1924 def registersummarycallback(repo, otr, txnname=b''):
1925 """register a callback to issue a summary after the transaction is closed
1925 """register a callback to issue a summary after the transaction is closed
1926 """
1926 """
1927
1927
1928 def txmatch(sources):
1928 def txmatch(sources):
1929 return any(txnname.startswith(source) for source in sources)
1929 return any(txnname.startswith(source) for source in sources)
1930
1930
1931 categories = []
1931 categories = []
1932
1932
1933 def reportsummary(func):
1933 def reportsummary(func):
1934 """decorator for report callbacks."""
1934 """decorator for report callbacks."""
1935 # The repoview life cycle is shorter than the one of the actual
1935 # The repoview life cycle is shorter than the one of the actual
1936 # underlying repository. So the filtered object can die before the
1936 # underlying repository. So the filtered object can die before the
1937 # weakref is used, leading to trouble. We keep a reference to the
1937 # weakref is used, leading to trouble. We keep a reference to the
1938 # unfiltered object and restore the filtering when retrieving the
1938 # unfiltered object and restore the filtering when retrieving the
1939 # repository through the weakref.
1939 # repository through the weakref.
1940 filtername = repo.filtername
1940 filtername = repo.filtername
1941 reporef = weakref.ref(repo.unfiltered())
1941 reporef = weakref.ref(repo.unfiltered())
1942
1942
1943 def wrapped(tr):
1943 def wrapped(tr):
1944 repo = reporef()
1944 repo = reporef()
1945 if filtername:
1945 if filtername:
1946 repo = repo.filtered(filtername)
1946 repo = repo.filtered(filtername)
1947 func(repo, tr)
1947 func(repo, tr)
1948
1948
1949 newcat = b'%02i-txnreport' % len(categories)
1949 newcat = b'%02i-txnreport' % len(categories)
1950 otr.addpostclose(newcat, wrapped)
1950 otr.addpostclose(newcat, wrapped)
1951 categories.append(newcat)
1951 categories.append(newcat)
1952 return wrapped
1952 return wrapped
1953
1953
1954 @reportsummary
1954 @reportsummary
1955 def reportchangegroup(repo, tr):
1955 def reportchangegroup(repo, tr):
1956 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
1956 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
1957 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
1957 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
1958 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
1958 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
1959 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
1959 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
1960 if cgchangesets or cgrevisions or cgfiles:
1960 if cgchangesets or cgrevisions or cgfiles:
1961 htext = b""
1961 htext = b""
1962 if cgheads:
1962 if cgheads:
1963 htext = _(b" (%+d heads)") % cgheads
1963 htext = _(b" (%+d heads)") % cgheads
1964 msg = _(b"added %d changesets with %d changes to %d files%s\n")
1964 msg = _(b"added %d changesets with %d changes to %d files%s\n")
1965 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
1965 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
1966
1966
1967 if txmatch(_reportobsoletedsource):
1967 if txmatch(_reportobsoletedsource):
1968
1968
1969 @reportsummary
1969 @reportsummary
1970 def reportobsoleted(repo, tr):
1970 def reportobsoleted(repo, tr):
1971 obsoleted = obsutil.getobsoleted(repo, tr)
1971 obsoleted = obsutil.getobsoleted(repo, tr)
1972 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
1972 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
1973 if newmarkers:
1973 if newmarkers:
1974 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
1974 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
1975 if obsoleted:
1975 if obsoleted:
1976 repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))
1976 repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))
1977
1977
1978 if obsolete.isenabled(
1978 if obsolete.isenabled(
1979 repo, obsolete.createmarkersopt
1979 repo, obsolete.createmarkersopt
1980 ) and repo.ui.configbool(
1980 ) and repo.ui.configbool(
1981 b'experimental', b'evolution.report-instabilities'
1981 b'experimental', b'evolution.report-instabilities'
1982 ):
1982 ):
1983 instabilitytypes = [
1983 instabilitytypes = [
1984 (b'orphan', b'orphan'),
1984 (b'orphan', b'orphan'),
1985 (b'phase-divergent', b'phasedivergent'),
1985 (b'phase-divergent', b'phasedivergent'),
1986 (b'content-divergent', b'contentdivergent'),
1986 (b'content-divergent', b'contentdivergent'),
1987 ]
1987 ]
1988
1988
1989 def getinstabilitycounts(repo):
1989 def getinstabilitycounts(repo):
1990 filtered = repo.changelog.filteredrevs
1990 filtered = repo.changelog.filteredrevs
1991 counts = {}
1991 counts = {}
1992 for instability, revset in instabilitytypes:
1992 for instability, revset in instabilitytypes:
1993 counts[instability] = len(
1993 counts[instability] = len(
1994 set(obsolete.getrevs(repo, revset)) - filtered
1994 set(obsolete.getrevs(repo, revset)) - filtered
1995 )
1995 )
1996 return counts
1996 return counts
1997
1997
1998 oldinstabilitycounts = getinstabilitycounts(repo)
1998 oldinstabilitycounts = getinstabilitycounts(repo)
1999
1999
2000 @reportsummary
2000 @reportsummary
2001 def reportnewinstabilities(repo, tr):
2001 def reportnewinstabilities(repo, tr):
2002 newinstabilitycounts = getinstabilitycounts(repo)
2002 newinstabilitycounts = getinstabilitycounts(repo)
2003 for instability, revset in instabilitytypes:
2003 for instability, revset in instabilitytypes:
2004 delta = (
2004 delta = (
2005 newinstabilitycounts[instability]
2005 newinstabilitycounts[instability]
2006 - oldinstabilitycounts[instability]
2006 - oldinstabilitycounts[instability]
2007 )
2007 )
2008 msg = getinstabilitymessage(delta, instability)
2008 msg = getinstabilitymessage(delta, instability)
2009 if msg:
2009 if msg:
2010 repo.ui.warn(msg)
2010 repo.ui.warn(msg)
2011
2011
2012 if txmatch(_reportnewcssource):
2012 if txmatch(_reportnewcssource):
2013
2013
2014 @reportsummary
2014 @reportsummary
2015 def reportnewcs(repo, tr):
2015 def reportnewcs(repo, tr):
2016 """Report the range of new revisions pulled/unbundled."""
2016 """Report the range of new revisions pulled/unbundled."""
2017 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2017 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2018 unfi = repo.unfiltered()
2018 unfi = repo.unfiltered()
2019 if origrepolen >= len(unfi):
2019 if origrepolen >= len(unfi):
2020 return
2020 return
2021
2021
2022 # Compute the bounds of new visible revisions' range.
2022 # Compute the bounds of new visible revisions' range.
2023 revs = smartset.spanset(repo, start=origrepolen)
2023 revs = smartset.spanset(repo, start=origrepolen)
2024 if revs:
2024 if revs:
2025 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2025 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2026
2026
2027 if minrev == maxrev:
2027 if minrev == maxrev:
2028 revrange = minrev
2028 revrange = minrev
2029 else:
2029 else:
2030 revrange = b'%s:%s' % (minrev, maxrev)
2030 revrange = b'%s:%s' % (minrev, maxrev)
2031 draft = len(repo.revs(b'%ld and draft()', revs))
2031 draft = len(repo.revs(b'%ld and draft()', revs))
2032 secret = len(repo.revs(b'%ld and secret()', revs))
2032 secret = len(repo.revs(b'%ld and secret()', revs))
2033 if not (draft or secret):
2033 if not (draft or secret):
2034 msg = _(b'new changesets %s\n') % revrange
2034 msg = _(b'new changesets %s\n') % revrange
2035 elif draft and secret:
2035 elif draft and secret:
2036 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2036 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2037 msg %= (revrange, draft, secret)
2037 msg %= (revrange, draft, secret)
2038 elif draft:
2038 elif draft:
2039 msg = _(b'new changesets %s (%d drafts)\n')
2039 msg = _(b'new changesets %s (%d drafts)\n')
2040 msg %= (revrange, draft)
2040 msg %= (revrange, draft)
2041 elif secret:
2041 elif secret:
2042 msg = _(b'new changesets %s (%d secrets)\n')
2042 msg = _(b'new changesets %s (%d secrets)\n')
2043 msg %= (revrange, secret)
2043 msg %= (revrange, secret)
2044 else:
2044 else:
2045 errormsg = b'entered unreachable condition'
2045 errormsg = b'entered unreachable condition'
2046 raise error.ProgrammingError(errormsg)
2046 raise error.ProgrammingError(errormsg)
2047 repo.ui.status(msg)
2047 repo.ui.status(msg)
2048
2048
2049 # search new changesets directly pulled as obsolete
2049 # search new changesets directly pulled as obsolete
2050 duplicates = tr.changes.get(b'revduplicates', ())
2050 duplicates = tr.changes.get(b'revduplicates', ())
2051 obsadded = unfi.revs(
2051 obsadded = unfi.revs(
2052 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2052 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2053 )
2053 )
2054 cl = repo.changelog
2054 cl = repo.changelog
2055 extinctadded = [r for r in obsadded if r not in cl]
2055 extinctadded = [r for r in obsadded if r not in cl]
2056 if extinctadded:
2056 if extinctadded:
2057 # They are not just obsolete, but obsolete and invisible;
2057 # They are not just obsolete, but obsolete and invisible;
2058 # we call them "extinct" internally, but the term has not been
2058 # we call them "extinct" internally, but the term has not been
2059 # exposed to users.
2059 # exposed to users.
2060 msg = b'(%d other changesets obsolete on arrival)\n'
2060 msg = b'(%d other changesets obsolete on arrival)\n'
2061 repo.ui.status(msg % len(extinctadded))
2061 repo.ui.status(msg % len(extinctadded))
2062
2062
2063 @reportsummary
2063 @reportsummary
2064 def reportphasechanges(repo, tr):
2064 def reportphasechanges(repo, tr):
2065 """Report statistics of phase changes for changesets pre-existing
2065 """Report statistics of phase changes for changesets pre-existing
2066 pull/unbundle.
2066 pull/unbundle.
2067 """
2067 """
2068 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2068 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2069 phasetracking = tr.changes.get(b'phases', {})
2069 phasetracking = tr.changes.get(b'phases', {})
2070 if not phasetracking:
2070 if not phasetracking:
2071 return
2071 return
2072 published = [
2072 published = [
2073 rev
2073 rev
2074 for rev, (old, new) in pycompat.iteritems(phasetracking)
2074 for rev, (old, new) in pycompat.iteritems(phasetracking)
2075 if new == phases.public and rev < origrepolen
2075 if new == phases.public and rev < origrepolen
2076 ]
2076 ]
2077 if not published:
2077 if not published:
2078 return
2078 return
2079 repo.ui.status(
2079 repo.ui.status(
2080 _(b'%d local changesets published\n') % len(published)
2080 _(b'%d local changesets published\n') % len(published)
2081 )
2081 )
2082
2082
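# A hedged sketch of the caller side (the real wiring lives in localrepo's
# transaction handling, not here): the callback is registered against an open
# transaction and the decorated report functions above fire when it closes.
#
#   tr = repo.transaction(b'unbundle')
#   registersummarycallback(repo, tr, txnname=b'unbundle')
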
2083
2083
2084 def getinstabilitymessage(delta, instability):
2084 def getinstabilitymessage(delta, instability):
2085 """function to return the message to show warning about new instabilities
2085 """function to return the message to show warning about new instabilities
2086
2086
2087 exists as a separate function so that extensions can wrap it to show more
2087 exists as a separate function so that extensions can wrap it to show more
2088 information, like how to fix instabilities"""
2088 information, like how to fix instabilities"""
2089 if delta > 0:
2089 if delta > 0:
2090 return _(b'%i new %s changesets\n') % (delta, instability)
2090 return _(b'%i new %s changesets\n') % (delta, instability)
2091
2091
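# A hedged sketch of the wrapping mentioned in the docstring above
# (illustrative, not part of scmutil); the appended hint text is made up:
#
#   from mercurial import extensions, scmutil
#
#   def _withhint(orig, delta, instability):
#       msg = orig(delta, instability)
#       if msg:
#           msg += b"(run 'hg evolve' to resolve them)\n"
#       return msg
#
#   extensions.wrapfunction(scmutil, 'getinstabilitymessage', _withhint)
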
2092
2092
2093 def nodesummaries(repo, nodes, maxnumnodes=4):
2093 def nodesummaries(repo, nodes, maxnumnodes=4):
2094 if len(nodes) <= maxnumnodes or repo.ui.verbose:
2094 if len(nodes) <= maxnumnodes or repo.ui.verbose:
2095 return b' '.join(short(h) for h in nodes)
2095 return b' '.join(short(h) for h in nodes)
2096 first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2096 first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2097 return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2097 return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2098
2098
2099
2099
2100 def enforcesinglehead(repo, tr, desc, accountclosed=False):
2100 def enforcesinglehead(repo, tr, desc, accountclosed=False):
2101 """check that no named branch has multiple heads"""
2101 """check that no named branch has multiple heads"""
2102 if desc in (b'strip', b'repair'):
2102 if desc in (b'strip', b'repair'):
2103 # skip the logic during strip
2103 # skip the logic during strip
2104 return
2104 return
2105 visible = repo.filtered(b'visible')
2105 visible = repo.filtered(b'visible')
2106 # possible improvement: we could restrict the check to the affected branches
2106 # possible improvement: we could restrict the check to the affected branches
2107 bm = visible.branchmap()
2107 bm = visible.branchmap()
2108 for name in bm:
2108 for name in bm:
2109 heads = bm.branchheads(name, closed=accountclosed)
2109 heads = bm.branchheads(name, closed=accountclosed)
2110 if len(heads) > 1:
2110 if len(heads) > 1:
2111 msg = _(b'rejecting multiple heads on branch "%s"')
2111 msg = _(b'rejecting multiple heads on branch "%s"')
2112 msg %= name
2112 msg %= name
2113 hint = _(b'%d heads: %s')
2113 hint = _(b'%d heads: %s')
2114 hint %= (len(heads), nodesummaries(repo, heads))
2114 hint %= (len(heads), nodesummaries(repo, heads))
2115 raise error.Abort(msg, hint=hint)
2115 raise error.Abort(msg, hint=hint)
2116
2116
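# For reference, a sketch of the configuration that typically activates this
# check (the enforcement is wired up as a transaction validator in localrepo,
# not here); whether closed heads also count is what 'accountclosed' controls:
#
#   [experimental]
#   single-head-per-branch = yes
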
2117
2117
2118 def wrapconvertsink(sink):
2118 def wrapconvertsink(sink):
2119 """Allow extensions to wrap the sink returned by convcmd.convertsink()
2119 """Allow extensions to wrap the sink returned by convcmd.convertsink()
2120 before it is used, whether or not the convert extension was formally loaded.
2120 before it is used, whether or not the convert extension was formally loaded.
2121 """
2121 """
2122 return sink
2122 return sink
2123
2123
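# A hedged sketch of the kind of override an extension could install here
# (illustrative; 'msgrewritingsink' is a hypothetical proxy class that would
# post-process each putcommit() call on the converted sink):
#
#   def _wrapsink(orig, sink):
#       return msgrewritingsink(orig(sink))
#
#   extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)
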
2124
2124
2125 def unhidehashlikerevs(repo, specs, hiddentype):
2125 def unhidehashlikerevs(repo, specs, hiddentype):
2126 """parse the user specs and unhide changesets whose hash or revision number
2126 """parse the user specs and unhide changesets whose hash or revision number
2127 is passed.
2127 is passed.
2128
2128
2129 hiddentype can be: 1) 'warn': warn while unhiding changesets
2129 hiddentype can be: 1) 'warn': warn while unhiding changesets
2130 2) 'nowarn': don't warn while unhiding changesets
2130 2) 'nowarn': don't warn while unhiding changesets
2131
2131
2132 returns a repo object with the required changesets unhidden
2132 returns a repo object with the required changesets unhidden
2133 """
2133 """
2134 if not repo.filtername or not repo.ui.configbool(
2134 if not repo.filtername or not repo.ui.configbool(
2135 b'experimental', b'directaccess'
2135 b'experimental', b'directaccess'
2136 ):
2136 ):
2137 return repo
2137 return repo
2138
2138
2139 if repo.filtername not in (b'visible', b'visible-hidden'):
2139 if repo.filtername not in (b'visible', b'visible-hidden'):
2140 return repo
2140 return repo
2141
2141
2142 symbols = set()
2142 symbols = set()
2143 for spec in specs:
2143 for spec in specs:
2144 try:
2144 try:
2145 tree = revsetlang.parse(spec)
2145 tree = revsetlang.parse(spec)
2146 except error.ParseError: # will be reported by scmutil.revrange()
2146 except error.ParseError: # will be reported by scmutil.revrange()
2147 continue
2147 continue
2148
2148
2149 symbols.update(revsetlang.gethashlikesymbols(tree))
2149 symbols.update(revsetlang.gethashlikesymbols(tree))
2150
2150
2151 if not symbols:
2151 if not symbols:
2152 return repo
2152 return repo
2153
2153
2154 revs = _getrevsfromsymbols(repo, symbols)
2154 revs = _getrevsfromsymbols(repo, symbols)
2155
2155
2156 if not revs:
2156 if not revs:
2157 return repo
2157 return repo
2158
2158
2159 if hiddentype == b'warn':
2159 if hiddentype == b'warn':
2160 unfi = repo.unfiltered()
2160 unfi = repo.unfiltered()
2161 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2161 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2162 repo.ui.warn(
2162 repo.ui.warn(
2163 _(
2163 _(
2164 b"warning: accessing hidden changesets for write "
2164 b"warning: accessing hidden changesets for write "
2165 b"operation: %s\n"
2165 b"operation: %s\n"
2166 )
2166 )
2167 % revstr
2167 % revstr
2168 )
2168 )
2169
2169
2170 # we have to use a new filtername to separate branch/tags caches until we can
2170 # we have to use a new filtername to separate branch/tags caches until we can
2171 # disable these caches when revisions are dynamically pinned.
2171 # disable these caches when revisions are dynamically pinned.
2172 return repo.filtered(b'visible-hidden', revs)
2172 return repo.filtered(b'visible-hidden', revs)
2173
2173
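# For reference, direct access to hidden hashes is gated behind configuration
# matching the configbool() reads above, along the lines of:
#
#   [experimental]
#   directaccess = True
#   directaccess.revnums = True
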
2174
2174
2175 def _getrevsfromsymbols(repo, symbols):
2175 def _getrevsfromsymbols(repo, symbols):
2176 """parse the list of symbols and returns a set of revision numbers of hidden
2176 """parse the list of symbols and returns a set of revision numbers of hidden
2177 changesets present in symbols"""
2177 changesets present in symbols"""
2178 revs = set()
2178 revs = set()
2179 unfi = repo.unfiltered()
2179 unfi = repo.unfiltered()
2180 unficl = unfi.changelog
2180 unficl = unfi.changelog
2181 cl = repo.changelog
2181 cl = repo.changelog
2182 tiprev = len(unficl)
2182 tiprev = len(unficl)
2183 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2183 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2184 for s in symbols:
2184 for s in symbols:
2185 try:
2185 try:
2186 n = int(s)
2186 n = int(s)
2187 if n <= tiprev:
2187 if n <= tiprev:
2188 if not allowrevnums:
2188 if not allowrevnums:
2189 continue
2189 continue
2190 else:
2190 else:
2191 if n not in cl:
2191 if n not in cl:
2192 revs.add(n)
2192 revs.add(n)
2193 continue
2193 continue
2194 except ValueError:
2194 except ValueError:
2195 pass
2195 pass
2196
2196
2197 try:
2197 try:
2198 s = resolvehexnodeidprefix(unfi, s)
2198 s = resolvehexnodeidprefix(unfi, s)
2199 except (error.LookupError, error.WdirUnsupported):
2199 except (error.LookupError, error.WdirUnsupported):
2200 s = None
2200 s = None
2201
2201
2202 if s is not None:
2202 if s is not None:
2203 rev = unficl.rev(s)
2203 rev = unficl.rev(s)
2204 if rev not in cl:
2204 if rev not in cl:
2205 revs.add(rev)
2205 revs.add(rev)
2206
2206
2207 return revs
2207 return revs
2208
2208
2209
2209
2210 def bookmarkrevs(repo, mark):
2210 def bookmarkrevs(repo, mark):
2211 """
2211 """
2212 Select revisions reachable by a given bookmark
2212 Select revisions reachable by a given bookmark
2213 """
2213 """
2214 return repo.revs(
2214 return repo.revs(
2215 b"ancestors(bookmark(%s)) - "
2215 b"ancestors(bookmark(%s)) - "
2216 b"ancestors(head() and not bookmark(%s)) - "
2216 b"ancestors(head() and not bookmark(%s)) - "
2217 b"ancestors(bookmark() and not bookmark(%s))",
2217 b"ancestors(bookmark() and not bookmark(%s))",
2218 mark,
2218 mark,
2219 mark,
2219 mark,
2220 mark,
2220 mark,
2221 )
2221 )
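# A small usage sketch (editorial illustration): the revset above yields a
# smartset of revision numbers, so a caller might iterate it directly, e.g.
#
#   for rev in bookmarkrevs(repo, b'@'):
#       repo.ui.write(b'%d\n' % rev)
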
@@ -1,1301 +1,1307 b''
1 #testcases sshv1 sshv2
1 #testcases sshv1 sshv2
2
2
3 #if sshv2
3 #if sshv2
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > sshpeer.advertise-v2 = true
6 > sshpeer.advertise-v2 = true
7 > sshserver.support-v2 = true
7 > sshserver.support-v2 = true
8 > EOF
8 > EOF
9 #endif
9 #endif
10
10
11 Prepare repo a:
11 Prepare repo a:
12
12
13 $ hg init a
13 $ hg init a
14 $ cd a
14 $ cd a
15 $ echo a > a
15 $ echo a > a
16 $ hg add a
16 $ hg add a
17 $ hg commit -m test
17 $ hg commit -m test
18 $ echo first line > b
18 $ echo first line > b
19 $ hg add b
19 $ hg add b
20
20
21 Create a non-inlined filelog:
21 Create a non-inlined filelog:
22
22
23 $ "$PYTHON" -c 'open("data1", "wb").write(b"".join(b"%d\n" % x for x in range(10000)))'
23 $ "$PYTHON" -c 'open("data1", "wb").write(b"".join(b"%d\n" % x for x in range(10000)))'
24 $ for j in 0 1 2 3 4 5 6 7 8 9; do
24 $ for j in 0 1 2 3 4 5 6 7 8 9; do
25 > cat data1 >> b
25 > cat data1 >> b
26 > hg commit -m test
26 > hg commit -m test
27 > done
27 > done
28
28
29 List files in store/data (should show a 'b.d'):
29 List files in store/data (should show a 'b.d'):
30
30
31 #if reporevlogstore
31 #if reporevlogstore
32 $ for i in .hg/store/data/*; do
32 $ for i in .hg/store/data/*; do
33 > echo $i
33 > echo $i
34 > done
34 > done
35 .hg/store/data/a.i
35 .hg/store/data/a.i
36 .hg/store/data/b.d
36 .hg/store/data/b.d
37 .hg/store/data/b.i
37 .hg/store/data/b.i
38 #endif
38 #endif
39
39
40 Trigger branchcache creation:
40 Trigger branchcache creation:
41
41
42 $ hg branches
42 $ hg branches
43 default 10:a7949464abda
43 default 10:a7949464abda
44 $ ls .hg/cache
44 $ ls .hg/cache
45 branch2-served
45 branch2-served
46 rbc-names-v1
46 rbc-names-v1
47 rbc-revs-v1
47 rbc-revs-v1
48
48
49 Default operation:
49 Default operation:
50
50
51 $ hg clone . ../b
51 $ hg clone . ../b
52 updating to branch default
52 updating to branch default
53 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
53 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
54 $ cd ../b
54 $ cd ../b
55
55
56 Ensure branchcache got copied over:
56 Ensure branchcache got copied over:
57
57
58 $ ls .hg/cache
58 $ ls .hg/cache
59 branch2-served
59 branch2-served
60 rbc-names-v1
60 rbc-names-v1
61 rbc-revs-v1
61 rbc-revs-v1
62
62
63 $ cat a
63 $ cat a
64 a
64 a
65 $ hg verify
65 $ hg verify
66 checking changesets
66 checking changesets
67 checking manifests
67 checking manifests
68 crosschecking files in changesets and manifests
68 crosschecking files in changesets and manifests
69 checking files
69 checking files
70 checked 11 changesets with 11 changes to 2 files
70 checked 11 changesets with 11 changes to 2 files
71
71
72 Invalid dest '' must abort:
72 Invalid dest '' must abort:
73
73
74 $ hg clone . ''
74 $ hg clone . ''
75 abort: empty destination path is not valid
75 abort: empty destination path is not valid
76 [255]
76 [255]
77
77
78 No update, with debug option:
78 No update, with debug option:
79
79
80 #if hardlink
80 #if hardlink
81 $ hg --debug clone -U . ../c --config progress.debug=true
81 $ hg --debug clone -U . ../c --config progress.debug=true
82 linking: 1 files
82 linking: 1 files
83 linking: 2 files
83 linking: 2 files
84 linking: 3 files
84 linking: 3 files
85 linking: 4 files
85 linking: 4 files
86 linking: 5 files
86 linking: 5 files
87 linking: 6 files
87 linking: 6 files
88 linking: 7 files
88 linking: 7 files
89 linking: 8 files
89 linking: 8 files
90 linked 8 files (reporevlogstore !)
90 linked 8 files (reporevlogstore !)
91 linking: 9 files (reposimplestore !)
91 linking: 9 files (reposimplestore !)
92 linking: 10 files (reposimplestore !)
92 linking: 10 files (reposimplestore !)
93 linking: 11 files (reposimplestore !)
93 linking: 11 files (reposimplestore !)
94 linking: 12 files (reposimplestore !)
94 linking: 12 files (reposimplestore !)
95 linking: 13 files (reposimplestore !)
95 linking: 13 files (reposimplestore !)
96 linking: 14 files (reposimplestore !)
96 linking: 14 files (reposimplestore !)
97 linking: 15 files (reposimplestore !)
97 linking: 15 files (reposimplestore !)
98 linking: 16 files (reposimplestore !)
98 linking: 16 files (reposimplestore !)
99 linking: 17 files (reposimplestore !)
99 linking: 17 files (reposimplestore !)
100 linking: 18 files (reposimplestore !)
100 linking: 18 files (reposimplestore !)
101 linked 18 files (reposimplestore !)
101 linked 18 files (reposimplestore !)
102 #else
102 #else
103 $ hg --debug clone -U . ../c --config progress.debug=true
103 $ hg --debug clone -U . ../c --config progress.debug=true
104 linking: 1 files
104 linking: 1 files
105 copying: 2 files
105 copying: 2 files
106 copying: 3 files
106 copying: 3 files
107 copying: 4 files
107 copying: 4 files
108 copying: 5 files
108 copying: 5 files
109 copying: 6 files
109 copying: 6 files
110 copying: 7 files
110 copying: 7 files
111 copying: 8 files
111 copying: 8 files
112 copied 8 files (reporevlogstore !)
112 copied 8 files (reporevlogstore !)
113 copying: 9 files (reposimplestore !)
113 copying: 9 files (reposimplestore !)
114 copying: 10 files (reposimplestore !)
114 copying: 10 files (reposimplestore !)
115 copying: 11 files (reposimplestore !)
115 copying: 11 files (reposimplestore !)
116 copying: 12 files (reposimplestore !)
116 copying: 12 files (reposimplestore !)
117 copying: 13 files (reposimplestore !)
117 copying: 13 files (reposimplestore !)
118 copying: 14 files (reposimplestore !)
118 copying: 14 files (reposimplestore !)
119 copying: 15 files (reposimplestore !)
119 copying: 15 files (reposimplestore !)
120 copying: 16 files (reposimplestore !)
120 copying: 16 files (reposimplestore !)
121 copying: 17 files (reposimplestore !)
121 copying: 17 files (reposimplestore !)
122 copying: 18 files (reposimplestore !)
122 copying: 18 files (reposimplestore !)
123 copied 18 files (reposimplestore !)
123 copied 18 files (reposimplestore !)
124 #endif
124 #endif
125 $ cd ../c
125 $ cd ../c
126
126
127 Ensure branchcache got copied over:
127 Ensure branchcache got copied over:
128
128
129 $ ls .hg/cache
129 $ ls .hg/cache
130 branch2-served
130 branch2-served
131 rbc-names-v1
131 rbc-names-v1
132 rbc-revs-v1
132 rbc-revs-v1
133
133
134 $ cat a 2>/dev/null || echo "a not present"
134 $ cat a 2>/dev/null || echo "a not present"
135 a not present
135 a not present
136 $ hg verify
136 $ hg verify
137 checking changesets
137 checking changesets
138 checking manifests
138 checking manifests
139 crosschecking files in changesets and manifests
139 crosschecking files in changesets and manifests
140 checking files
140 checking files
141 checked 11 changesets with 11 changes to 2 files
141 checked 11 changesets with 11 changes to 2 files
142
142
143 Default destination:
143 Default destination:
144
144
145 $ mkdir ../d
145 $ mkdir ../d
146 $ cd ../d
146 $ cd ../d
147 $ hg clone ../a
147 $ hg clone ../a
148 destination directory: a
148 destination directory: a
149 updating to branch default
149 updating to branch default
150 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
150 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
151 $ cd a
151 $ cd a
152 $ hg cat a
152 $ hg cat a
153 a
153 a
154 $ cd ../..
154 $ cd ../..
155
155
156 Check that we drop the 'file:' from the path before writing the .hgrc:
156 Check that we drop the 'file:' from the path before writing the .hgrc:
157
157
158 $ hg clone file:a e
158 $ hg clone file:a e
159 updating to branch default
159 updating to branch default
160 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
160 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
161 $ grep 'file:' e/.hg/hgrc
161 $ grep 'file:' e/.hg/hgrc
162 [1]
162 [1]
163
163
164 Check that path aliases are expanded:
164 Check that path aliases are expanded:
165
165
166 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
166 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
167 $ hg -R f showconfig paths.default
167 $ hg -R f showconfig paths.default
168 $TESTTMP/a#0
168 $TESTTMP/a#0
169
169
170 Use --pull:
170 Use --pull:
171
171
172 $ hg clone --pull a g
172 $ hg clone --pull a g
173 requesting all changes
173 requesting all changes
174 adding changesets
174 adding changesets
175 adding manifests
175 adding manifests
176 adding file changes
176 adding file changes
177 added 11 changesets with 11 changes to 2 files
177 added 11 changesets with 11 changes to 2 files
178 new changesets acb14030fe0a:a7949464abda
178 new changesets acb14030fe0a:a7949464abda
179 updating to branch default
179 updating to branch default
180 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
180 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
181 $ hg -R g verify
181 $ hg -R g verify
182 checking changesets
182 checking changesets
183 checking manifests
183 checking manifests
184 crosschecking files in changesets and manifests
184 crosschecking files in changesets and manifests
185 checking files
185 checking files
186 checked 11 changesets with 11 changes to 2 files
186 checked 11 changesets with 11 changes to 2 files
187
187
188 Invalid dest '' with --pull must abort (issue2528):
188 Invalid dest '' with --pull must abort (issue2528):
189
189
190 $ hg clone --pull a ''
190 $ hg clone --pull a ''
191 abort: empty destination path is not valid
191 abort: empty destination path is not valid
192 [255]
192 [255]
193
193
194 Clone to '.':
194 Clone to '.':
195
195
196 $ mkdir h
196 $ mkdir h
197 $ cd h
197 $ cd h
198 $ hg clone ../a .
198 $ hg clone ../a .
199 updating to branch default
199 updating to branch default
200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
201 $ cd ..
201 $ cd ..
202
202
203
203
204 *** Tests for option -u ***
204 *** Tests for option -u ***
205
205
206 Adding some more history to repo a:
206 Adding some more history to repo a:
207
207
208 $ cd a
208 $ cd a
209 $ hg tag ref1
209 $ hg tag ref1
210 $ echo the quick brown fox >a
210 $ echo the quick brown fox >a
211 $ hg ci -m "hacked default"
211 $ hg ci -m "hacked default"
212 $ hg up ref1
212 $ hg up ref1
213 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
213 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
214 $ hg branch stable
214 $ hg branch stable
215 marked working directory as branch stable
215 marked working directory as branch stable
216 (branches are permanent and global, did you want a bookmark?)
216 (branches are permanent and global, did you want a bookmark?)
217 $ echo some text >a
217 $ echo some text >a
218 $ hg ci -m "starting branch stable"
218 $ hg ci -m "starting branch stable"
219 $ hg tag ref2
219 $ hg tag ref2
220 $ echo some more text >a
220 $ echo some more text >a
221 $ hg ci -m "another change for branch stable"
221 $ hg ci -m "another change for branch stable"
222 $ hg up ref2
222 $ hg up ref2
223 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
223 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
224 $ hg parents
224 $ hg parents
225 changeset: 13:e8ece76546a6
225 changeset: 13:e8ece76546a6
226 branch: stable
226 branch: stable
227 tag: ref2
227 tag: ref2
228 parent: 10:a7949464abda
228 parent: 10:a7949464abda
229 user: test
229 user: test
230 date: Thu Jan 01 00:00:00 1970 +0000
230 date: Thu Jan 01 00:00:00 1970 +0000
231 summary: starting branch stable
231 summary: starting branch stable
232
232
233
233
234 Repo a has two heads:
234 Repo a has two heads:
235
235
236 $ hg heads
236 $ hg heads
237 changeset: 15:0aae7cf88f0d
237 changeset: 15:0aae7cf88f0d
238 branch: stable
238 branch: stable
239 tag: tip
239 tag: tip
240 user: test
240 user: test
241 date: Thu Jan 01 00:00:00 1970 +0000
241 date: Thu Jan 01 00:00:00 1970 +0000
242 summary: another change for branch stable
242 summary: another change for branch stable
243
243
244 changeset: 12:f21241060d6a
244 changeset: 12:f21241060d6a
245 user: test
245 user: test
246 date: Thu Jan 01 00:00:00 1970 +0000
246 date: Thu Jan 01 00:00:00 1970 +0000
247 summary: hacked default
247 summary: hacked default
248
248
249
249
250 $ cd ..
250 $ cd ..
251
251
252
252
253 Testing --noupdate with --updaterev (must abort):
253 Testing --noupdate with --updaterev (must abort):
254
254
255 $ hg clone --noupdate --updaterev 1 a ua
255 $ hg clone --noupdate --updaterev 1 a ua
256 abort: cannot specify both --noupdate and --updaterev
256 abort: cannot specify both --noupdate and --updaterev
257 [255]
257 [255]
258
258
259
259
260 Testing clone -u:
260 Testing clone -u:
261
261
262 $ hg clone -u . a ua
262 $ hg clone -u . a ua
263 updating to branch stable
263 updating to branch stable
264 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
264 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
265
265
266 Repo ua has both heads:
266 Repo ua has both heads:
267
267
268 $ hg -R ua heads
268 $ hg -R ua heads
269 changeset: 15:0aae7cf88f0d
269 changeset: 15:0aae7cf88f0d
270 branch: stable
270 branch: stable
271 tag: tip
271 tag: tip
272 user: test
272 user: test
273 date: Thu Jan 01 00:00:00 1970 +0000
273 date: Thu Jan 01 00:00:00 1970 +0000
274 summary: another change for branch stable
274 summary: another change for branch stable
275
275
276 changeset: 12:f21241060d6a
276 changeset: 12:f21241060d6a
277 user: test
277 user: test
278 date: Thu Jan 01 00:00:00 1970 +0000
278 date: Thu Jan 01 00:00:00 1970 +0000
279 summary: hacked default
279 summary: hacked default
280
280
281
281
282 Same revision checked out in repo a and ua:
282 Same revision checked out in repo a and ua:
283
283
284 $ hg -R a parents --template "{node|short}\n"
284 $ hg -R a parents --template "{node|short}\n"
285 e8ece76546a6
285 e8ece76546a6
286 $ hg -R ua parents --template "{node|short}\n"
286 $ hg -R ua parents --template "{node|short}\n"
287 e8ece76546a6
287 e8ece76546a6
288
288
289 $ rm -r ua
289 $ rm -r ua
290
290
291
291
292 Testing clone --pull -u:
292 Testing clone --pull -u:
293
293
294 $ hg clone --pull -u . a ua
294 $ hg clone --pull -u . a ua
295 requesting all changes
295 requesting all changes
296 adding changesets
296 adding changesets
297 adding manifests
297 adding manifests
298 adding file changes
298 adding file changes
299 added 16 changesets with 16 changes to 3 files (+1 heads)
299 added 16 changesets with 16 changes to 3 files (+1 heads)
300 new changesets acb14030fe0a:0aae7cf88f0d
300 new changesets acb14030fe0a:0aae7cf88f0d
301 updating to branch stable
301 updating to branch stable
302 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
302 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
303
303
304 Repo ua has both heads:
304 Repo ua has both heads:
305
305
306 $ hg -R ua heads
306 $ hg -R ua heads
307 changeset: 15:0aae7cf88f0d
307 changeset: 15:0aae7cf88f0d
308 branch: stable
308 branch: stable
309 tag: tip
309 tag: tip
310 user: test
310 user: test
311 date: Thu Jan 01 00:00:00 1970 +0000
311 date: Thu Jan 01 00:00:00 1970 +0000
312 summary: another change for branch stable
312 summary: another change for branch stable
313
313
314 changeset: 12:f21241060d6a
314 changeset: 12:f21241060d6a
315 user: test
315 user: test
316 date: Thu Jan 01 00:00:00 1970 +0000
316 date: Thu Jan 01 00:00:00 1970 +0000
317 summary: hacked default
317 summary: hacked default
318
318
319
319
320 Same revision checked out in repo a and ua:
320 Same revision checked out in repo a and ua:
321
321
322 $ hg -R a parents --template "{node|short}\n"
322 $ hg -R a parents --template "{node|short}\n"
323 e8ece76546a6
323 e8ece76546a6
324 $ hg -R ua parents --template "{node|short}\n"
324 $ hg -R ua parents --template "{node|short}\n"
325 e8ece76546a6
325 e8ece76546a6
326
326
327 $ rm -r ua
327 $ rm -r ua
328
328
329
329
330 Testing clone -u <branch>:
330 Testing clone -u <branch>:
331
331
332 $ hg clone -u stable a ua
332 $ hg clone -u stable a ua
333 updating to branch stable
333 updating to branch stable
334 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
334 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
335
335
336 Repo ua has both heads:
336 Repo ua has both heads:
337
337
338 $ hg -R ua heads
338 $ hg -R ua heads
339 changeset: 15:0aae7cf88f0d
339 changeset: 15:0aae7cf88f0d
340 branch: stable
340 branch: stable
341 tag: tip
341 tag: tip
342 user: test
342 user: test
343 date: Thu Jan 01 00:00:00 1970 +0000
343 date: Thu Jan 01 00:00:00 1970 +0000
344 summary: another change for branch stable
344 summary: another change for branch stable
345
345
346 changeset: 12:f21241060d6a
346 changeset: 12:f21241060d6a
347 user: test
347 user: test
348 date: Thu Jan 01 00:00:00 1970 +0000
348 date: Thu Jan 01 00:00:00 1970 +0000
349 summary: hacked default
349 summary: hacked default
350
350
351
351
352 Branch 'stable' is checked out:
352 Branch 'stable' is checked out:
353
353
354 $ hg -R ua parents
354 $ hg -R ua parents
355 changeset: 15:0aae7cf88f0d
355 changeset: 15:0aae7cf88f0d
356 branch: stable
356 branch: stable
357 tag: tip
357 tag: tip
358 user: test
358 user: test
359 date: Thu Jan 01 00:00:00 1970 +0000
359 date: Thu Jan 01 00:00:00 1970 +0000
360 summary: another change for branch stable
360 summary: another change for branch stable
361
361
362
362
363 $ rm -r ua
363 $ rm -r ua
364
364
365
365
366 Testing default checkout:
366 Testing default checkout:
367
367
368 $ hg clone a ua
368 $ hg clone a ua
369 updating to branch default
369 updating to branch default
370 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
371
371
372 Repo ua has both heads:
372 Repo ua has both heads:
373
373
374 $ hg -R ua heads
374 $ hg -R ua heads
375 changeset: 15:0aae7cf88f0d
375 changeset: 15:0aae7cf88f0d
376 branch: stable
376 branch: stable
377 tag: tip
377 tag: tip
378 user: test
378 user: test
379 date: Thu Jan 01 00:00:00 1970 +0000
379 date: Thu Jan 01 00:00:00 1970 +0000
380 summary: another change for branch stable
380 summary: another change for branch stable
381
381
382 changeset: 12:f21241060d6a
382 changeset: 12:f21241060d6a
383 user: test
383 user: test
384 date: Thu Jan 01 00:00:00 1970 +0000
384 date: Thu Jan 01 00:00:00 1970 +0000
385 summary: hacked default
385 summary: hacked default
386
386
387
387
388 Branch 'default' is checked out:
388 Branch 'default' is checked out:
389
389
390 $ hg -R ua parents
390 $ hg -R ua parents
391 changeset: 12:f21241060d6a
391 changeset: 12:f21241060d6a
392 user: test
392 user: test
393 date: Thu Jan 01 00:00:00 1970 +0000
393 date: Thu Jan 01 00:00:00 1970 +0000
394 summary: hacked default
394 summary: hacked default
395
395
396 Test clone with a branch named "@" (issue3677)
396 Test clone with a branch named "@" (issue3677)
397
397
398 $ hg -R ua branch @
398 $ hg -R ua branch @
399 marked working directory as branch @
399 marked working directory as branch @
400 $ hg -R ua commit -m 'created branch @'
400 $ hg -R ua commit -m 'created branch @'
401 $ hg clone ua atbranch
401 $ hg clone ua atbranch
402 updating to branch default
402 updating to branch default
403 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
403 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
404 $ hg -R atbranch heads
404 $ hg -R atbranch heads
405 changeset: 16:798b6d97153e
405 changeset: 16:798b6d97153e
406 branch: @
406 branch: @
407 tag: tip
407 tag: tip
408 parent: 12:f21241060d6a
408 parent: 12:f21241060d6a
409 user: test
409 user: test
410 date: Thu Jan 01 00:00:00 1970 +0000
410 date: Thu Jan 01 00:00:00 1970 +0000
411 summary: created branch @
411 summary: created branch @
412
412
413 changeset: 15:0aae7cf88f0d
413 changeset: 15:0aae7cf88f0d
414 branch: stable
414 branch: stable
415 user: test
415 user: test
416 date: Thu Jan 01 00:00:00 1970 +0000
416 date: Thu Jan 01 00:00:00 1970 +0000
417 summary: another change for branch stable
417 summary: another change for branch stable
418
418
419 changeset: 12:f21241060d6a
419 changeset: 12:f21241060d6a
420 user: test
420 user: test
421 date: Thu Jan 01 00:00:00 1970 +0000
421 date: Thu Jan 01 00:00:00 1970 +0000
422 summary: hacked default
422 summary: hacked default
423
423
424 $ hg -R atbranch parents
424 $ hg -R atbranch parents
425 changeset: 12:f21241060d6a
425 changeset: 12:f21241060d6a
426 user: test
426 user: test
427 date: Thu Jan 01 00:00:00 1970 +0000
427 date: Thu Jan 01 00:00:00 1970 +0000
428 summary: hacked default
428 summary: hacked default
429
429
430
430
431 $ rm -r ua atbranch
431 $ rm -r ua atbranch
432
432
433
433
434 Testing #<branch>:
434 Testing #<branch>:
435
435
436 $ hg clone -u . a#stable ua
436 $ hg clone -u . a#stable ua
437 adding changesets
437 adding changesets
438 adding manifests
438 adding manifests
439 adding file changes
439 adding file changes
440 added 14 changesets with 14 changes to 3 files
440 added 14 changesets with 14 changes to 3 files
441 new changesets acb14030fe0a:0aae7cf88f0d
441 new changesets acb14030fe0a:0aae7cf88f0d
442 updating to branch stable
442 updating to branch stable
443 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
443 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
444
444
445 Repo ua has branches 'stable' and 'default' (was changed in fd511e9eeea6):
445 Repo ua has branches 'stable' and 'default' (was changed in fd511e9eeea6):
446
446
447 $ hg -R ua heads
447 $ hg -R ua heads
448 changeset: 13:0aae7cf88f0d
448 changeset: 13:0aae7cf88f0d
449 branch: stable
449 branch: stable
450 tag: tip
450 tag: tip
451 user: test
451 user: test
452 date: Thu Jan 01 00:00:00 1970 +0000
452 date: Thu Jan 01 00:00:00 1970 +0000
453 summary: another change for branch stable
453 summary: another change for branch stable
454
454
455 changeset: 10:a7949464abda
455 changeset: 10:a7949464abda
456 user: test
456 user: test
457 date: Thu Jan 01 00:00:00 1970 +0000
457 date: Thu Jan 01 00:00:00 1970 +0000
458 summary: test
458 summary: test
459
459
460
460
461 Same revision checked out in repo a and ua:
461 Same revision checked out in repo a and ua:
462
462
463 $ hg -R a parents --template "{node|short}\n"
463 $ hg -R a parents --template "{node|short}\n"
464 e8ece76546a6
464 e8ece76546a6
465 $ hg -R ua parents --template "{node|short}\n"
465 $ hg -R ua parents --template "{node|short}\n"
466 e8ece76546a6
466 e8ece76546a6
467
467
468 $ rm -r ua
468 $ rm -r ua
469
469
470
470
471 Testing -u -r <branch>:
471 Testing -u -r <branch>:
472
472
473 $ hg clone -u . -r stable a ua
473 $ hg clone -u . -r stable a ua
474 adding changesets
474 adding changesets
475 adding manifests
475 adding manifests
476 adding file changes
476 adding file changes
477 added 14 changesets with 14 changes to 3 files
477 added 14 changesets with 14 changes to 3 files
478 new changesets acb14030fe0a:0aae7cf88f0d
478 new changesets acb14030fe0a:0aae7cf88f0d
479 updating to branch stable
479 updating to branch stable
480 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
480 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
481
481
482 Repo ua has branches 'stable' and 'default' (was changed in fd511e9eeea6):
482 Repo ua has branches 'stable' and 'default' (was changed in fd511e9eeea6):
483
483
484 $ hg -R ua heads
484 $ hg -R ua heads
485 changeset: 13:0aae7cf88f0d
485 changeset: 13:0aae7cf88f0d
486 branch: stable
486 branch: stable
487 tag: tip
487 tag: tip
488 user: test
488 user: test
489 date: Thu Jan 01 00:00:00 1970 +0000
489 date: Thu Jan 01 00:00:00 1970 +0000
490 summary: another change for branch stable
490 summary: another change for branch stable
491
491
492 changeset: 10:a7949464abda
492 changeset: 10:a7949464abda
493 user: test
493 user: test
494 date: Thu Jan 01 00:00:00 1970 +0000
494 date: Thu Jan 01 00:00:00 1970 +0000
495 summary: test
495 summary: test
496
496
497
497
498 Same revision checked out in repo a and ua:
498 Same revision checked out in repo a and ua:
499
499
500 $ hg -R a parents --template "{node|short}\n"
500 $ hg -R a parents --template "{node|short}\n"
501 e8ece76546a6
501 e8ece76546a6
502 $ hg -R ua parents --template "{node|short}\n"
502 $ hg -R ua parents --template "{node|short}\n"
503 e8ece76546a6
503 e8ece76546a6
504
504
505 $ rm -r ua
505 $ rm -r ua
506
506
507
507
508 Testing -r <branch>:
508 Testing -r <branch>:
509
509
510 $ hg clone -r stable a ua
510 $ hg clone -r stable a ua
511 adding changesets
511 adding changesets
512 adding manifests
512 adding manifests
513 adding file changes
513 adding file changes
514 added 14 changesets with 14 changes to 3 files
514 added 14 changesets with 14 changes to 3 files
515 new changesets acb14030fe0a:0aae7cf88f0d
515 new changesets acb14030fe0a:0aae7cf88f0d
516 updating to branch stable
516 updating to branch stable
517 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
517 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
518
518
519 Repo ua has branches 'stable' and 'default' (was changed in fd511e9eeea6):
519 Repo ua has branches 'stable' and 'default' (was changed in fd511e9eeea6):
520
520
521 $ hg -R ua heads
521 $ hg -R ua heads
522 changeset: 13:0aae7cf88f0d
522 changeset: 13:0aae7cf88f0d
523 branch: stable
523 branch: stable
524 tag: tip
524 tag: tip
525 user: test
525 user: test
526 date: Thu Jan 01 00:00:00 1970 +0000
526 date: Thu Jan 01 00:00:00 1970 +0000
527 summary: another change for branch stable
527 summary: another change for branch stable
528
528
529 changeset: 10:a7949464abda
529 changeset: 10:a7949464abda
530 user: test
530 user: test
531 date: Thu Jan 01 00:00:00 1970 +0000
531 date: Thu Jan 01 00:00:00 1970 +0000
532 summary: test
532 summary: test
533
533
534
534
535 Branch 'stable' is checked out:
535 Branch 'stable' is checked out:
536
536
537 $ hg -R ua parents
537 $ hg -R ua parents
538 changeset: 13:0aae7cf88f0d
538 changeset: 13:0aae7cf88f0d
539 branch: stable
539 branch: stable
540 tag: tip
540 tag: tip
541 user: test
541 user: test
542 date: Thu Jan 01 00:00:00 1970 +0000
542 date: Thu Jan 01 00:00:00 1970 +0000
543 summary: another change for branch stable
543 summary: another change for branch stable
544
544
545
545
546 $ rm -r ua
546 $ rm -r ua
547
547
548
548
549 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
549 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
550 iterable in addbranchrevs()
550 iterable in addbranchrevs()
551
551
552 $ cat <<EOF > simpleclone.py
552 $ cat <<EOF > simpleclone.py
553 > from mercurial import hg, ui as uimod
553 > from mercurial import hg, ui as uimod
554 > myui = uimod.ui.load()
554 > myui = uimod.ui.load()
555 > repo = hg.repository(myui, b'a')
555 > repo = hg.repository(myui, b'a')
556 > hg.clone(myui, {}, repo, dest=b"ua")
556 > hg.clone(myui, {}, repo, dest=b"ua")
557 > EOF
557 > EOF
558
558
559 $ "$PYTHON" simpleclone.py
559 $ "$PYTHON" simpleclone.py
560 updating to branch default
560 updating to branch default
561 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
561 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
562
562
563 $ rm -r ua
563 $ rm -r ua
564
564
565 $ cat <<EOF > branchclone.py
565 $ cat <<EOF > branchclone.py
566 > from mercurial import extensions, hg, ui as uimod
566 > from mercurial import extensions, hg, ui as uimod
567 > myui = uimod.ui.load()
567 > myui = uimod.ui.load()
568 > extensions.loadall(myui)
568 > extensions.loadall(myui)
569 > extensions.populateui(myui)
569 > extensions.populateui(myui)
570 > repo = hg.repository(myui, b'a')
570 > repo = hg.repository(myui, b'a')
571 > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable"])
571 > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable"])
572 > EOF
572 > EOF
573
573
574 $ "$PYTHON" branchclone.py
574 $ "$PYTHON" branchclone.py
575 adding changesets
575 adding changesets
576 adding manifests
576 adding manifests
577 adding file changes
577 adding file changes
578 added 14 changesets with 14 changes to 3 files
578 added 14 changesets with 14 changes to 3 files
579 new changesets acb14030fe0a:0aae7cf88f0d
579 new changesets acb14030fe0a:0aae7cf88f0d
580 updating to branch stable
580 updating to branch stable
581 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
581 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
582 $ rm -r ua
582 $ rm -r ua
583
583
584
584
585 Test clone with special '@' bookmark:
585 Test clone with special '@' bookmark:
586 $ cd a
586 $ cd a
587 $ hg bookmark -r a7949464abda @ # branch point of stable from default
587 $ hg bookmark -r a7949464abda @ # branch point of stable from default
588 $ hg clone . ../i
588 $ hg clone . ../i
589 updating to bookmark @
589 updating to bookmark @
590 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
590 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
591 $ hg id -i ../i
591 $ hg id -i ../i
592 a7949464abda
592 a7949464abda
593 $ rm -r ../i
593 $ rm -r ../i
594
594
595 $ hg bookmark -f -r stable @
595 $ hg bookmark -f -r stable @
596 $ hg bookmarks
596 $ hg bookmarks
597 @ 15:0aae7cf88f0d
597 @ 15:0aae7cf88f0d
598 $ hg clone . ../i
598 $ hg clone . ../i
599 updating to bookmark @ on branch stable
599 updating to bookmark @ on branch stable
600 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
600 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
601 $ hg id -i ../i
601 $ hg id -i ../i
602 0aae7cf88f0d
602 0aae7cf88f0d
603 $ cd "$TESTTMP"
603 $ cd "$TESTTMP"
604
604
605
605
606 Testing failures:
606 Testing failures:
607
607
608 $ mkdir fail
608 $ mkdir fail
609 $ cd fail
609 $ cd fail
610
610
611 No local source
611 No local source
612
612
613 $ hg clone a b
613 $ hg clone a b
614 abort: repository a not found!
614 abort: repository a not found!
615 [255]
615 [255]
616
616
617 Invalid URL
618
619 $ hg clone http://invalid:url/a b
620 abort: error: nonnumeric port: 'url'
621 [255]
622
617 No remote source
623 No remote source
618
624
619 #if windows
625 #if windows
620 $ hg clone http://$LOCALIP:3121/a b
626 $ hg clone http://$LOCALIP:3121/a b
621 abort: error: * (glob)
627 abort: error: * (glob)
622 [255]
628 [255]
623 #else
629 #else
624 $ hg clone http://$LOCALIP:3121/a b
630 $ hg clone http://$LOCALIP:3121/a b
625 abort: error: *refused* (glob)
631 abort: error: *refused* (glob)
626 [255]
632 [255]
627 #endif
633 #endif
628 $ rm -rf b # work around bug with http clone
634 $ rm -rf b # work around bug with http clone
629
635
630
636
631 #if unix-permissions no-root
637 #if unix-permissions no-root
632
638
633 Inaccessible source
639 Inaccessible source
634
640
635 $ mkdir a
641 $ mkdir a
636 $ chmod 000 a
642 $ chmod 000 a
637 $ hg clone a b
643 $ hg clone a b
638 abort: Permission denied: *$TESTTMP/fail/a/.hg* (glob)
644 abort: Permission denied: *$TESTTMP/fail/a/.hg* (glob)
639 [255]
645 [255]
640
646
641 Inaccessible destination
647 Inaccessible destination
642
648
643 $ hg init b
649 $ hg init b
644 $ cd b
650 $ cd b
645 $ hg clone . ../a
651 $ hg clone . ../a
646 abort: Permission denied: *../a* (glob)
652 abort: Permission denied: *../a* (glob)
647 [255]
653 [255]
648 $ cd ..
654 $ cd ..
649 $ chmod 700 a
655 $ chmod 700 a
650 $ rm -r a b
656 $ rm -r a b
651
657
652 #endif
658 #endif
653
659
654
660
655 #if fifo
661 #if fifo
656
662
657 Source of wrong type
663 Source of wrong type
658
664
659 $ mkfifo a
665 $ mkfifo a
660 $ hg clone a b
666 $ hg clone a b
661 abort: $ENOTDIR$: *$TESTTMP/fail/a/.hg* (glob)
667 abort: $ENOTDIR$: *$TESTTMP/fail/a/.hg* (glob)
662 [255]
668 [255]
663 $ rm a
669 $ rm a
664
670
665 #endif
671 #endif
666
672
667 Default destination, same directory
673 Default destination, same directory
668
674
669 $ hg init q
675 $ hg init q
670 $ hg clone q
676 $ hg clone q
671 destination directory: q
677 destination directory: q
672 abort: destination 'q' is not empty
678 abort: destination 'q' is not empty
673 [255]
679 [255]
674
680
675 destination directory not empty
681 destination directory not empty
676
682
677 $ mkdir a
683 $ mkdir a
678 $ echo stuff > a/a
684 $ echo stuff > a/a
679 $ hg clone q a
685 $ hg clone q a
680 abort: destination 'a' is not empty
686 abort: destination 'a' is not empty
681 [255]
687 [255]
682
688
683
689
684 #if unix-permissions no-root
690 #if unix-permissions no-root
685
691
686 leave existing directory in place after clone failure
692 leave existing directory in place after clone failure
687
693
688 $ hg init c
694 $ hg init c
689 $ cd c
695 $ cd c
690 $ echo c > c
696 $ echo c > c
691 $ hg commit -A -m test
697 $ hg commit -A -m test
692 adding c
698 adding c
693 $ chmod -rx .hg/store/data
699 $ chmod -rx .hg/store/data
694 $ cd ..
700 $ cd ..
695 $ mkdir d
701 $ mkdir d
696 $ hg clone c d 2> err
702 $ hg clone c d 2> err
697 [255]
703 [255]
698 $ test -d d
704 $ test -d d
699 $ test -d d/.hg
705 $ test -d d/.hg
700 [1]
706 [1]
701
707
702 re-enable perm to allow deletion
708 re-enable perm to allow deletion
703
709
704 $ chmod +rx c/.hg/store/data
710 $ chmod +rx c/.hg/store/data
705
711
706 #endif
712 #endif
707
713
708 $ cd ..
714 $ cd ..
709
715
710 Test clone from the repository in (emulated) revlog format 0 (issue4203):
716 Test clone from the repository in (emulated) revlog format 0 (issue4203):
711
717
712 $ mkdir issue4203
718 $ mkdir issue4203
713 $ mkdir -p src/.hg
719 $ mkdir -p src/.hg
714 $ echo foo > src/foo
720 $ echo foo > src/foo
715 $ hg -R src add src/foo
721 $ hg -R src add src/foo
716 $ hg -R src commit -m '#0'
722 $ hg -R src commit -m '#0'
717 $ hg -R src log -q
723 $ hg -R src log -q
718 0:e1bab28bca43
724 0:e1bab28bca43
719 $ hg -R src debugrevlog -c | egrep 'format|flags'
725 $ hg -R src debugrevlog -c | egrep 'format|flags'
720 format : 0
726 format : 0
721 flags : (none)
727 flags : (none)
722 $ hg root -R src -T json | sed 's|\\\\|\\|g'
728 $ hg root -R src -T json | sed 's|\\\\|\\|g'
723 [
729 [
724 {
730 {
725 "hgpath": "$TESTTMP/src/.hg",
731 "hgpath": "$TESTTMP/src/.hg",
726 "reporoot": "$TESTTMP/src",
732 "reporoot": "$TESTTMP/src",
727 "storepath": "$TESTTMP/src/.hg"
733 "storepath": "$TESTTMP/src/.hg"
728 }
734 }
729 ]
735 ]
730 $ hg clone -U -q src dst
736 $ hg clone -U -q src dst
731 $ hg -R dst log -q
737 $ hg -R dst log -q
732 0:e1bab28bca43
738 0:e1bab28bca43
733
739
734 Create repositories to test auto sharing functionality
740 Create repositories to test auto sharing functionality
735
741
736 $ cat >> $HGRCPATH << EOF
742 $ cat >> $HGRCPATH << EOF
737 > [extensions]
743 > [extensions]
738 > share=
744 > share=
739 > EOF
745 > EOF
740
746
  $ hg init empty
  $ hg init source1a
  $ cd source1a
  $ echo initial1 > foo
  $ hg -q commit -A -m initial
  $ echo second > foo
  $ hg commit -m second
  $ cd ..

  $ hg init filteredrev0
  $ cd filteredrev0
  $ cat >> .hg/hgrc << EOF
  > [experimental]
  > evolution.createmarkers=True
  > EOF
  $ echo initial1 > foo
  $ hg -q commit -A -m initial0
  $ hg -q up -r null
  $ echo initial2 > foo
  $ hg -q commit -A -m initial1
  $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
  1 new obsolescence markers
  obsoleted 1 changesets
  $ cd ..

  $ hg -q clone --pull source1a source1b
  $ cd source1a
  $ hg bookmark bookA
  $ echo 1a > foo
  $ hg commit -m 1a
  $ cd ../source1b
  $ hg -q up -r 0
  $ echo head1 > foo
  $ hg commit -m head1
  created new head
  $ hg bookmark head1
  $ hg -q up -r 0
  $ echo head2 > foo
  $ hg commit -m head2
  created new head
  $ hg bookmark head2
  $ hg -q up -r 0
  $ hg branch branch1
  marked working directory as branch branch1
  (branches are permanent and global, did you want a bookmark?)
  $ echo branch1 > foo
  $ hg commit -m branch1
  $ hg -q up -r 0
  $ hg branch branch2
  marked working directory as branch branch2
  $ echo branch2 > foo
  $ hg commit -m branch2
  $ cd ..
  $ hg init source2
  $ cd source2
  $ echo initial2 > foo
  $ hg -q commit -A -m initial2
  $ echo second > foo
  $ hg commit -m second
  $ cd ..

Clone with auto share from an empty repo should not result in share

  $ mkdir share
  $ hg --config share.pool=share clone empty share-empty
  (not using pooled storage: remote appears to be empty)
  updating to branch default
  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ ls share
  $ test -d share-empty/.hg/store
  $ test -f share-empty/.hg/sharedpath
  [1]

Clone with auto share from a repo with filtered revision 0 should not result in share

  $ hg --config share.pool=share clone filteredrev0 share-filtered
  (not using pooled storage: unable to resolve identity of remote)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets e082c1832e09
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

Clone from repo with content should result in shared store being created

  $ hg --config share.pool=share clone source1a share-dest1a
  (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 3 changes to 1 files
  new changesets b5f04eac9d8f:e5bfe23c0b47
  searching for changes
  no changes found
  adding remote bookmark bookA
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

The shared repo should have been created

  $ ls share
  b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1

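The name of that pool entry is not arbitrary: with identity naming it is the full node of
revision 0 of the remote, so b5f04eac9d8f... above matches the 0:b5f04eac9d8f root changeset
of source1a, and any repository sharing that root changeset maps onto the same pooled store.
One way to confirm (illustrative, not a test step):

hg -R source1a log -r 0 -T '{node}\n'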
The destination should point to it

  $ cat share-dest1a/.hg/sharedpath; echo
  $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg

The destination should have bookmarks

  $ hg -R share-dest1a bookmarks
  bookA 2:e5bfe23c0b47

The default path should be the remote, not the share

  $ hg -R share-dest1a config paths.default
  $TESTTMP/source1a

Clone with existing share dir should result in pull + share

  $ hg --config share.pool=share clone source1b share-dest1b
  (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  adding remote bookmark head1
  adding remote bookmark head2
  added 4 changesets with 4 changes to 1 files (+4 heads)
  new changesets 4a8dc1ab4c13:6bacf4683960
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ ls share
  b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1

  $ cat share-dest1b/.hg/sharedpath; echo
  $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg

We only get bookmarks from the remote, not everything in the share

  $ hg -R share-dest1b bookmarks
  head1 3:4a8dc1ab4c13
  head2 4:99f71071f117

Default path should be source, not share.

  $ hg -R share-dest1b config paths.default
  $TESTTMP/source1b

Checked out revision should be head of default branch

  $ hg -R share-dest1b log -r .
  changeset: 4:99f71071f117
  bookmark: head2
  parent: 0:b5f04eac9d8f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: head2


Clone from unrelated repo should result in new share

  $ hg --config share.pool=share clone source2 share-dest2
  (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 2 changes to 1 files
  new changesets 22aeff664783:63cf6c3dba4a
  searching for changes
  no changes found
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ ls share
  22aeff664783fd44c6d9b435618173c118c3448e
  b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1

remote naming mode works as advertised

  $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
  (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 3 changes to 1 files
  new changesets b5f04eac9d8f:e5bfe23c0b47
  searching for changes
  no changes found
  adding remote bookmark bookA
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ ls shareremote
  195bb1fcdb595c14a6c13e0269129ed78f6debde

  $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
  (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 6 changesets with 6 changes to 1 files (+4 heads)
  new changesets b5f04eac9d8f:6bacf4683960
  searching for changes
  no changes found
  adding remote bookmark head1
  adding remote bookmark head2
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ ls shareremote
  195bb1fcdb595c14a6c13e0269129ed78f6debde
  c0d4f83847ca2a873741feb7048a45085fd47c46

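With share.poolnaming=remote the pool entry is derived from a hash of the clone source rather
than from the root changeset, which is why source1a and source1b, although related, land in two
distinct entries under shareremote, whereas identity naming earlier collapsed them into a single
b5f04eac... store. As a config sketch (path illustrative):

[share]
pool = /path/to/share-pool
poolnaming = remote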
request to clone a single revision is respected in sharing mode

  $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
  (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 2 changes to 1 files
  new changesets b5f04eac9d8f:4a8dc1ab4c13
  no changes found
  adding remote bookmark head1
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg -R share-1arev log -G
  @ changeset: 1:4a8dc1ab4c13
  | bookmark: head1
  | tag: tip
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: head1
  |
  o changeset: 0:b5f04eac9d8f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: initial


making another clone should only pull down requested rev

  $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
  (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  adding remote bookmark head1
  adding remote bookmark head2
  added 1 changesets with 1 changes to 1 files (+1 heads)
  new changesets 99f71071f117
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg -R share-1brev log -G
  @ changeset: 2:99f71071f117
  | bookmark: head2
  | tag: tip
  | parent: 0:b5f04eac9d8f
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: head2
  |
  | o changeset: 1:4a8dc1ab4c13
  |/ bookmark: head1
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: head1
  |
  o changeset: 0:b5f04eac9d8f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: initial


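Worth noting when reading the graphs above: revision numbers in a pooled clone are local to the
shared store, not inherited from the source. 99f71071f117 is revision 4 in source1b but shows up
as revision 2 in share-1brev, because only the revisions requested so far have been pulled into
the pool. To compare the two numbers (illustrative, not a test step):

hg -R source1b log -r 99f71071f117 -T '{rev}\n'
hg -R share-1brev log -r 99f71071f117 -T '{rev}\n'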
Request to clone a single branch is respected in sharing mode

  $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
  (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 2 changes to 1 files
  new changesets b5f04eac9d8f:5f92a6c1a1b1
  no changes found
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg -R share-1bbranch1 log -G
  o changeset: 1:5f92a6c1a1b1
  | branch: branch1
  | tag: tip
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: branch1
  |
  @ changeset: 0:b5f04eac9d8f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: initial


  $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
  (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files (+1 heads)
  new changesets 6bacf4683960
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg -R share-1bbranch2 log -G
  o changeset: 2:6bacf4683960
  | branch: branch2
  | tag: tip
  | parent: 0:b5f04eac9d8f
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: branch2
  |
  | o changeset: 1:5f92a6c1a1b1
  |/ branch: branch1
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: branch1
  |
  @ changeset: 0:b5f04eac9d8f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: initial


-U is respected in share clone mode

  $ hg --config share.pool=share clone -U source1a share-1anowc
  (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  searching for changes
  no changes found
  adding remote bookmark bookA

  $ ls share-1anowc

Test that auto sharing doesn't cause failure of "hg clone local remote"

  $ cd $TESTTMP
  $ hg -R a id -r 0
  acb14030fe0a
  $ hg id -R remote -r 0
  abort: repository remote not found!
  [255]
  $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
  $ hg -R remote id -r 0
  acb14030fe0a

Cloning into pooled storage doesn't race (issue5104)

  $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
  $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
  $ wait

  $ hg -R share-destrace1 log -r tip
  changeset: 2:e5bfe23c0b47
  bookmark: bookA
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: 1a


  $ hg -R share-destrace2 log -r tip
  changeset: 2:e5bfe23c0b47
  bookmark: bookA
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: 1a

One repo should be new, the other should be shared from the pool. We don't
care which is which, so we just make sure we always print the one containing
"new pooled" first, then the one containing "existing pooled". (The lockdelay
extension only stretches the window in which the two clones compete for the
pool lock.)

  $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
  (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 3 changes to 1 files
  new changesets b5f04eac9d8f:e5bfe23c0b47
  searching for changes
  no changes found
  adding remote bookmark bookA
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
  (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
  searching for changes
  no changes found
  adding remote bookmark bookA
  updating working directory
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

SEC: check for unsafe ssh url

  $ cat >> $HGRCPATH << EOF
  > [ui]
  > ssh = sh -c "read l; read l; read l"
  > EOF

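The ui.ssh override above swaps the real ssh client for a stub that simply consumes three lines
of input and exits, so none of the hostile URLs below can ever reach a network; only Mercurial's
URL parsing and the ssh command line it would build are exercised. The stub's behaviour can be
seen on its own (illustrative, not a test step):

printf 'a\nb\nc\n' | sh -c "read l; read l; read l"; echo "exit status: $?"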
  $ hg clone 'ssh://-oProxyCommand=touch${IFS}owned/path'
  abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
  [255]
  $ hg clone 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
  abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
  [255]
  $ hg clone 'ssh://fakehost|touch%20owned/path'
  abort: no suitable response from remote hg!
  [255]
  $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
  abort: no suitable response from remote hg!
  [255]

  $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
  abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned foo@example.com/nonexistent/path'
  [255]

#if windows
  $ hg clone "ssh://%26touch%20owned%20/" --debug
  running sh -c "read l; read l; read l" "&touch owned " "hg -R . serve --stdio"
  sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
  sending hello command
  sending between command
  abort: no suitable response from remote hg!
  [255]
  $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
  running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
  sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
  sending hello command
  sending between command
  abort: no suitable response from remote hg!
  [255]
#else
  $ hg clone "ssh://%3btouch%20owned%20/" --debug
  running sh -c "read l; read l; read l" ';touch owned ' 'hg -R . serve --stdio'
  sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
  sending hello command
  sending between command
  abort: no suitable response from remote hg!
  [255]
  $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
  running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
  sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
  sending hello command
  sending between command
  abort: no suitable response from remote hg!
  [255]
#endif

  $ hg clone "ssh://v-alid.example.com/" --debug
  running sh -c "read l; read l; read l" v-alid\.example\.com ['"]hg -R \. serve --stdio['"] (re)
  sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
  sending hello command
  sending between command
  abort: no suitable response from remote hg!
  [255]

We should not have created a file named owned - if it exists, the
attack succeeded.
  $ if test -f owned; then echo 'you got owned'; fi

Cloning without fsmonitor enabled does not print a warning for small repos

  $ hg clone a fsmonitor-default
  updating to bookmark @ on branch stable
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

Lower the warning threshold to simulate a large repo

  $ cat >> $HGRCPATH << EOF
  > [fsmonitor]
  > warn_update_file_count = 2
  > EOF

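For context: fsmonitor.warn_update_file_count is the number of files an update must touch before
the 'enable fsmonitor' hint is printed, so lowering it to 2 makes the three-file repository `a`
count as "large" for the rest of this file. The knobs exercised below would normally live in an
hgrc; the values here are illustrative except where noted:

[fsmonitor]
# hint once an update touches at least this many files (the test above uses 2)
warn_update_file_count = 100000
# set to false to suppress the hint entirely (tested below)
warn_when_unused = false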
We should see a warning about no fsmonitor on supported platforms

#if linuxormacos no-fsmonitor
  $ hg clone a nofsmonitor
  updating to bookmark @ on branch stable
  (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
#else
  $ hg clone a nofsmonitor
  updating to bookmark @ on branch stable
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
#endif

We should not see a warning about fsmonitor when it is enabled

#if fsmonitor
  $ hg clone a fsmonitor-enabled
  updating to bookmark @ on branch stable
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
#endif

We can disable the fsmonitor warning

  $ hg --config fsmonitor.warn_when_unused=false clone a fsmonitor-disable-warning
  updating to bookmark @ on branch stable
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

Loaded fsmonitor but disabled in config should still print warning

#if linuxormacos fsmonitor
  $ hg --config fsmonitor.mode=off clone a fsmonitor-mode-off
  updating to bookmark @ on branch stable
  (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (fsmonitor !)
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
#endif

Warning not printed if working directory isn't empty

  $ hg -q clone a fsmonitor-update
  (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (?)
  $ cd fsmonitor-update
  $ hg up acb14030fe0a
  1 files updated, 0 files merged, 2 files removed, 0 files unresolved
  (leaving bookmark @)
  $ hg up cf0fe1914066
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

`hg update` from the null revision also prints the warning

  $ hg up null
  0 files updated, 0 files merged, 2 files removed, 0 files unresolved

#if linuxormacos no-fsmonitor
  $ hg up cf0fe1914066
  (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
#else
  $ hg up cf0fe1914066
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
#endif

  $ cd ..