##// END OF EJS Templates
directaccess: do not abort by 'ff...' hash...
Yuya Nishihara -
r37112:7f025c9b default
parent child Browse files
Show More
@@ -1,1430 +1,1430 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 from .utils import (
44 from .utils import (
45 stringutil,
45 stringutil,
46 )
46 )
47
47
48 if pycompat.iswindows:
48 if pycompat.iswindows:
49 from . import scmwindows as scmplatform
49 from . import scmwindows as scmplatform
50 else:
50 else:
51 from . import scmposix as scmplatform
51 from . import scmposix as scmplatform
52
52
53 termsize = scmplatform.termsize
53 termsize = scmplatform.termsize
54
54
class status(tuple):
    '''Tuple subclass exposing one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' properties are only
    relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        items = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, items)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
107
107
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> ctx mapping, preferring entries from ctx1.
    # Subpaths coming only from ctx2 matter when the .hgsub file has
    # been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2, so
    # that 'sub.{status|diff}(rev2)' has an accurate baseline instead of
    # comparing the ctx2 subrepo against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
132
132
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull.

    excluded is None or a list of nodes excluded from the push/pull.
    '''
    # Count excluded changesets that are secret and still alive; these
    # explain why "no changes" may surprise the user.
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
149
149
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode, bytes, or something else entirely;
        # normalize before deciding how to present it.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
266
266
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not a valid new label (branch/bookmark/tag) name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    # Reject names that parse as integers; they would be ambiguous with
    # revision numbers.
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
283
283
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
288
288
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, util.shellquote(f))
            if abort:
                raise error.Abort(msg)
            # warn-only mode: report but keep going
            ui.warn(_("warning: %s\n") % msg)
300
300
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    # Windows always aborts on non-portable names.
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
313
313
class casecollisionauditor(object):
    '''Detect case-folding collisions between new files and tracked ones.

    Called with each new filename; warns (or aborts when abort=True) if
    the case-folded name collides with an already-known file.
    '''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Pre-compute the case-folded form of every tracked filename.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
337
337
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    # Only revisions at or below maxrev participate in the digest.
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
361
361
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; errors deeper in
        # the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat; return True only the first time this
            # physical directory is seen (guards against symlink cycles).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # No samestat available: cycle detection is impossible, so do not
        # follow symlinks at all.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
405
405
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # A None node means the working directory; substitute its pseudo id.
    return wdirid if node is None else node
412
412
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # A None rev means the working directory; substitute its pseudo rev.
    return wdirrev if rev is None else rev
420
420
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
426
426
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hash in debug mode, abbreviated otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
434
434
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve revspec to a single changectx, falling back to default when
    revspec is empty (but 0 is a valid revision).'''
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]
443
443
def _pairspec(revspec):
    # True when the top-level revset expression is a range operator, which
    # forces the result to be treated as a pair.
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
447
447
def revpair(repo, revs):
    '''Resolve revs to a (first, second) pair of binary nodes; second is
    None when the specs describe a single revision.'''
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # Pick the endpoints without materializing the whole smartset when its
    # ordering is known.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
477
477
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are wrapped as rev(N) revsets; everything else is
    # passed through untouched.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
505
505
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    # A sole parent that immediately precedes ctx carries no information.
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents
521
521
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit pattern kind ('glob:', 're:', ...) is kept verbatim
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # nothing matched: keep the original so the caller can complain
            expanded.append(kindpat)
    return expanded
540
540
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a lone empty pattern means "no patterns at all"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (a no-op except on windows, see expandpats)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over 'm', which is assigned below before the matcher
        # can ever invoke this callback
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # matcher matches everything, so the patterns carry no information
        pats = []
    return m, pats
565
565
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats, but the normalized pattern list is discarded
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
570
570
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
574
574
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    return matchmod.exact(root, repo.getcwd(), files, badfn=badfn)
578
578
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises ParseError (with ``msg``) unless the pattern designates exactly
    one file in revision ``rev``.
    """
    if not matchmod.patkind(pat):
        # a plain path: no matching needed, just canonicalize it
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matching = [f for f in ctx if matcher(f)]
    if len(matching) != 1:
        raise error.ParseError(msg)
    return matching[0]
592
592
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the path at which the backup should be written.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        # default behavior: back up next to the file itself
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a regular file or symlink occupying a needed ancestor directory)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # a directory (but not a symlink) sitting where the backup file itself
    # should go is removed wholesale
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
628
628
629 class _containsnode(object):
629 class _containsnode(object):
630 """proxy __contains__(node) to container.__contains__ which accepts revs"""
630 """proxy __contains__(node) to container.__contains__ which accepts revs"""
631
631
632 def __init__(self, repo, revcontainer):
632 def __init__(self, repo, revcontainer):
633 self._torev = repo.changelog.rev
633 self._torev = repo.changelog.rev
634 self._revcontains = revcontainer.__contains__
634 self._revcontains = revcontainer.__contains__
635
635
636 def __contains__(self, node):
636 def __contains__(self, node):
637 return self._revcontains(self._torev(node))
637 return self._revcontains(self._torev(node))
638
638
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms: a bare iterable means the nodes have
    # no successors
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # caller-supplied moves take precedence over computed ones
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards to the nearest surviving ancestor
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # no obsolescence support: physically strip the replaced nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
732
732
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule addition of unknown files and removal of missing files.

    ``prefix`` is joined onto subrepo paths in status messages. ``dry_run``
    and ``similarity`` fall back to the corresponding ``opts`` entries when
    not given; ``similarity`` is forwarded to _findrenames to enable rename
    detection.

    Returns 1 if a subrepository's addremove reported a change or an
    explicitly matched file was rejected, otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # recurse into subrepos selected by --subrepos or by the matcher
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files whose explicitly given patterns matched nothing
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) or removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            # an explicitly named file could not be matched
            return 1
    return ret
788
788
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any listed file could not be matched, otherwise 0.
    '''
    # NOTE: the lambda closes over 'rejected', which is assigned on the next
    # line; that is safe because badfn only runs once the matcher is used
    # further below.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report what will be added (unknown/forgotten) or removed (deleted)
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # unlike addremove, there is no dry-run mode here: always apply
    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            # an explicitly named file could not be matched
            return 1
    return 0
817
817
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    # unknown files are only reported when they pass the path audit
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # st is the walk's stat result (falsy when the file is absent on
        # disk); dstate is the dirstate code ('?' untracked, 'r' removed,
        # 'a' added, ...)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk -> deleted
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but still present -> forgotten
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
846
846
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping; empty unless similarity is positive.
    '''
    if similarity <= 0:
        return {}
    renames = {}
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # stay quiet only when not verbose and both names were given
        # explicitly (same evaluation order as the original condition)
        quiet = (not repo.ui.verbose and matcher.exact(old)
                 and matcher.exact(new))
        if not quiet:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
861
861
862 def _markchanges(repo, unknown, deleted, renames):
862 def _markchanges(repo, unknown, deleted, renames):
863 '''Marks the files in unknown as added, the files in deleted as removed,
863 '''Marks the files in unknown as added, the files in deleted as removed,
864 and the files in renames as copied.'''
864 and the files in renames as copied.'''
865 wctx = repo[None]
865 wctx = repo[None]
866 with repo.wlock():
866 with repo.wlock():
867 wctx.forget(deleted)
867 wctx.forget(deleted)
868 wctx.add(unknown)
868 wctx.add(unknown)
869 for new, old in renames.iteritems():
869 for new, old in renames.iteritems():
870 wctx.copy(old, new)
870 wctx.copy(old, new)
871
871
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow a copy chain: if src is itself a copy, credit its original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # dst not in state 'm' or 'n': schedule a re-lookup instead of
        # recording a self-copy
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source is only added ('a'), never committed: there is no
            # committed data a copy record could point at
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                # dst untracked ('?') or removed ('r'): just add it
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
890
890
def readrequires(opener, supported):
    '''Read and parse .hg/requires, checking every entry against the set of
    supported features.

    Returns the set of requirement strings. Raises RequirementError when the
    file looks corrupt or lists features this Mercurial does not know.
    '''
    requirements = set(opener.read("requires").splitlines())
    unsupported = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # a requirement must start with an alphanumeric character
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        unsupported.append(requirement)
    if unsupported:
        unsupported.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(unsupported),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
909
909
def writerequires(opener, requirements):
    """Write the requirement set to .hg/requires, one sorted entry per line."""
    entries = sorted(requirements)
    with opener('requires', 'w') as fp:
        for entry in entries:
            fp.write("%s\n" % entry)
914
914
class filecachesubentry(object):
    """Tracks stat info for one path so a cached value can be invalidated."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once known, None while undetermined
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        """Re-record stat info, unless the path is known to be uncacheable."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether stat-based caching can work for this path.

        The undetermined state is optimistically treated as cacheable.
        """
        if self._cacheable is None:
            # we don't know yet, assume it is for now
            return True
        return self._cacheable

    def changed(self):
        """True when the file changed, or when we cannot tell."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # still undetermined or known-uncacheable: report changed
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        return False

    @staticmethod
    def stat(path):
        """Stat ``path``; returns None when it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
969
969
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # short-circuits at the first changed entry, like the obvious loop
        return any(e.changed() for e in self._entries)

    def refresh(self):
        """Re-record stat info for every tracked path."""
        for e in self._entries:
            e.refresh()
986
986
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # remember the wrapped function; its name keys both _filecache
        # and the instance __dict__
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        """Return the cached value, recomputing when a tracked file changed."""
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # fast path: value already materialized on the instance
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        cached = obj._filecache.get(self.name)
        if cached is None:
            fnames = [self.join(obj, fname) for fname in self.paths]
            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            cached = filecacheentry(fnames, True)
            cached.obj = self.func(obj)
        elif cached.changed():
            # one of the tracked files changed on disk: rebuild the object
            cached.obj = self.func(obj)

        obj._filecache[self.name] = cached
        obj.__dict__[self.name] = cached.obj
        return cached.obj

    def __set__(self, obj, value):
        """Replace the cached value without re-reading tracked files."""
        ce = obj._filecache.get(self.name)
        if ce is None:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            fnames = [self.join(obj, fname) for fname in self.paths]
            ce = filecacheentry(fnames, False)
            obj._filecache[self.name] = ce
        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        """Drop the instance-level value; the next access recomputes it."""
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1065
1065
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # each record is "<revspec>[ <value>]"; a missing value means ""
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # drain/close the stream even when record parsing raised
        if proc:
            proc.communicate()
        if src:
            src.close()
    # a shell: source that exited non-zero invalidates whatever was parsed
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1120
1120
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd via ui.system() while exporting the inherited lock.

    'lock' must currently be held, otherwise
    LockInheritanceContractViolation is raised.  The locker token obtained
    from lock.inherit() is published to the child process through the
    'envvar' environment variable.  Returns the subprocess exit code.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    # NOTE(review): a caller-supplied 'environ' dict is mutated in place here
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1130
1130
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1139
1139
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1146
1146
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1152
1152
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a record without '=' ends up here via the dict() constructor
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = [] if firstline is None else ['%s\n' % firstline]

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not k[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (k, v))

        # atomictemp guarantees readers never observe a half-written file
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1221
1221
# transaction names (matched by prefix) after which the number of obsoleted
# changesets is reported
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix) after which the range of new
# changesets is reported
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1242
1242
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # 'sources' is a list of transaction-name prefixes to match against
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # numbered category names keep callbacks firing in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # summarize how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions that are not hidden by the repoview
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so only *new* instabilities are reported
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1326
1326
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of nodes.

    Short hashes for all nodes are shown when there are at most maxnumnodes
    of them or the ui is verbose; otherwise the list is truncated with an
    "and N others" suffix."""
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1332
1332
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # stripping legitimately leaves extra heads around for a while
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) < 2:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1347
1347
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions wrap this function
    return sink
1353
1353
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-looking or number-looking symbol from the specs
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:
            # bad spec: will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1396
1396
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try the symbol as a plain revision number
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        # valid unfiltered rev that is hidden in this view
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # otherwise, try it as a (partial) node hash
        try:
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            # WdirUnsupported: an all-'f' prefix matches the 'wdir()'
            # pseudo-node; treat it as unresolved instead of aborting
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
@@ -1,201 +1,214 b''
1 Tests for access level on hidden commits by various commands on based of their
1 Tests for access level on hidden commits by various commands on based of their
2 type.
2 type.
3
3
4 Setting the required config to start this
4 Setting the required config to start this
5
5
6 $ cat >> $HGRCPATH <<EOF
6 $ cat >> $HGRCPATH <<EOF
7 > [experimental]
7 > [experimental]
8 > evolution=createmarkers, allowunstable
8 > evolution=createmarkers, allowunstable
9 > directaccess=True
9 > directaccess=True
10 > directaccess.revnums=True
10 > directaccess.revnums=True
11 > [extensions]
11 > [extensions]
12 > amend =
12 > amend =
13 > EOF
13 > EOF
14
14
15 $ hg init repo
15 $ hg init repo
16 $ cd repo
16 $ cd repo
17 $ for ch in a b c; do touch $ch; echo "foo" >> $ch; hg ci -Aqm "Added "$ch; done
17 $ for ch in a b c; do touch $ch; echo "foo" >> $ch; hg ci -Aqm "Added "$ch; done
18
18
19 $ hg log -G -T '{rev}:{node} {desc}' --hidden
19 $ hg log -G -T '{rev}:{node} {desc}' --hidden
20 @ 2:28ad74487de9599d00d81085be739c61fc340652 Added c
20 @ 2:28ad74487de9599d00d81085be739c61fc340652 Added c
21 |
21 |
22 o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
22 o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
23 |
23 |
24 o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
24 o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
25
25
26 $ echo "bar" >> c
26 $ echo "bar" >> c
27 $ hg amend
27 $ hg amend
28
28
29 $ hg log -G -T '{rev}:{node} {desc}' --hidden
29 $ hg log -G -T '{rev}:{node} {desc}' --hidden
30 @ 3:2443a0e664694756d8b435d06b6ad84f941b6fc0 Added c
30 @ 3:2443a0e664694756d8b435d06b6ad84f941b6fc0 Added c
31 |
31 |
32 | x 2:28ad74487de9599d00d81085be739c61fc340652 Added c
32 | x 2:28ad74487de9599d00d81085be739c61fc340652 Added c
33 |/
33 |/
34 o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
34 o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
35 |
35 |
36 o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
36 o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
37
37
38 Testing read only commands on the hidden revision
38 Testing read only commands on the hidden revision
39
39
40 Testing with rev number
40 Testing with rev number
41
41
42 $ hg exp 2 --config experimental.directaccess.revnums=False
42 $ hg exp 2 --config experimental.directaccess.revnums=False
43 abort: hidden revision '2' was rewritten as: 2443a0e66469!
43 abort: hidden revision '2' was rewritten as: 2443a0e66469!
44 (use --hidden to access hidden revisions)
44 (use --hidden to access hidden revisions)
45 [255]
45 [255]
46
46
47 $ hg exp 2
47 $ hg exp 2
48 # HG changeset patch
48 # HG changeset patch
49 # User test
49 # User test
50 # Date 0 0
50 # Date 0 0
51 # Thu Jan 01 00:00:00 1970 +0000
51 # Thu Jan 01 00:00:00 1970 +0000
52 # Node ID 28ad74487de9599d00d81085be739c61fc340652
52 # Node ID 28ad74487de9599d00d81085be739c61fc340652
53 # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
53 # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
54 Added c
54 Added c
55
55
56 diff -r 29becc82797a -r 28ad74487de9 c
56 diff -r 29becc82797a -r 28ad74487de9 c
57 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
57 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
58 +++ b/c Thu Jan 01 00:00:00 1970 +0000
58 +++ b/c Thu Jan 01 00:00:00 1970 +0000
59 @@ -0,0 +1,1 @@
59 @@ -0,0 +1,1 @@
60 +foo
60 +foo
61
61
62 $ hg log -r 2
62 $ hg log -r 2
63 changeset: 2:28ad74487de9
63 changeset: 2:28ad74487de9
64 user: test
64 user: test
65 date: Thu Jan 01 00:00:00 1970 +0000
65 date: Thu Jan 01 00:00:00 1970 +0000
66 obsolete: rewritten using amend as 3:2443a0e66469
66 obsolete: rewritten using amend as 3:2443a0e66469
67 summary: Added c
67 summary: Added c
68
68
69 $ hg identify -r 2
69 $ hg identify -r 2
70 28ad74487de9
70 28ad74487de9
71
71
72 $ hg status --change 2
72 $ hg status --change 2
73 A c
73 A c
74
74
75 $ hg status --change 2 --config experimental.directaccess.revnums=False
75 $ hg status --change 2 --config experimental.directaccess.revnums=False
76 abort: hidden revision '2' was rewritten as: 2443a0e66469!
76 abort: hidden revision '2' was rewritten as: 2443a0e66469!
77 (use --hidden to access hidden revisions)
77 (use --hidden to access hidden revisions)
78 [255]
78 [255]
79
79
80 $ hg diff -c 2
80 $ hg diff -c 2
81 diff -r 29becc82797a -r 28ad74487de9 c
81 diff -r 29becc82797a -r 28ad74487de9 c
82 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
82 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
83 +++ b/c Thu Jan 01 00:00:00 1970 +0000
83 +++ b/c Thu Jan 01 00:00:00 1970 +0000
84 @@ -0,0 +1,1 @@
84 @@ -0,0 +1,1 @@
85 +foo
85 +foo
86
86
87 Testing with hash
87 Testing with hash
88
88
89 `hg export`
89 `hg export`
90
90
91 $ hg exp 28ad74
91 $ hg exp 28ad74
92 # HG changeset patch
92 # HG changeset patch
93 # User test
93 # User test
94 # Date 0 0
94 # Date 0 0
95 # Thu Jan 01 00:00:00 1970 +0000
95 # Thu Jan 01 00:00:00 1970 +0000
96 # Node ID 28ad74487de9599d00d81085be739c61fc340652
96 # Node ID 28ad74487de9599d00d81085be739c61fc340652
97 # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
97 # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
98 Added c
98 Added c
99
99
100 diff -r 29becc82797a -r 28ad74487de9 c
100 diff -r 29becc82797a -r 28ad74487de9 c
101 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
101 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
102 +++ b/c Thu Jan 01 00:00:00 1970 +0000
102 +++ b/c Thu Jan 01 00:00:00 1970 +0000
103 @@ -0,0 +1,1 @@
103 @@ -0,0 +1,1 @@
104 +foo
104 +foo
105
105
106 `hg log`
106 `hg log`
107
107
108 $ hg log -r 28ad74
108 $ hg log -r 28ad74
109 changeset: 2:28ad74487de9
109 changeset: 2:28ad74487de9
110 user: test
110 user: test
111 date: Thu Jan 01 00:00:00 1970 +0000
111 date: Thu Jan 01 00:00:00 1970 +0000
112 obsolete: rewritten using amend as 3:2443a0e66469
112 obsolete: rewritten using amend as 3:2443a0e66469
113 summary: Added c
113 summary: Added c
114
114
115 `hg cat`
115 `hg cat`
116
116
117 $ hg cat -r 28ad74 c
117 $ hg cat -r 28ad74 c
118 foo
118 foo
119
119
120 `hg diff`
120 `hg diff`
121
121
122 $ hg diff -c 28ad74
122 $ hg diff -c 28ad74
123 diff -r 29becc82797a -r 28ad74487de9 c
123 diff -r 29becc82797a -r 28ad74487de9 c
124 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
124 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
125 +++ b/c Thu Jan 01 00:00:00 1970 +0000
125 +++ b/c Thu Jan 01 00:00:00 1970 +0000
126 @@ -0,0 +1,1 @@
126 @@ -0,0 +1,1 @@
127 +foo
127 +foo
128
128
129 `hg files`
129 `hg files`
130
130
131 $ hg files -r 28ad74
131 $ hg files -r 28ad74
132 a
132 a
133 b
133 b
134 c
134 c
135
135
136 `hg identify`
136 `hg identify`
137
137
138 $ hg identify -r 28ad74
138 $ hg identify -r 28ad74
139 28ad74487de9
139 28ad74487de9
140
140
141 `hg annotate`
141 `hg annotate`
142
142
143 $ hg annotate -r 28ad74 a
143 $ hg annotate -r 28ad74 a
144 0: foo
144 0: foo
145
145
146 `hg status`
146 `hg status`
147
147
148 $ hg status --change 28ad74
148 $ hg status --change 28ad74
149 A c
149 A c
150
150
151 `hg archive`
151 `hg archive`
152
152
153 This should not throw error
153 This should not throw error
154 $ hg archive -r 28ad74 foo
154 $ hg archive -r 28ad74 foo
155
155
156 `hg update`
156 `hg update`
157
157
158 $ hg up 28ad74
158 $ hg up 28ad74
159 updating to a hidden changeset 28ad74487de9
159 updating to a hidden changeset 28ad74487de9
160 (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
160 (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
161 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
161 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
162
162
163 $ hg up 3
163 $ hg up 3
164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
165
165
166 $ hg up
166 $ hg up
167 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
167 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
168
168
169 `hg revert`
169 `hg revert`
170
170
171 $ hg revert -r 28ad74 --all
171 $ hg revert -r 28ad74 --all
172 reverting c
172 reverting c
173
173
174 $ hg diff
174 $ hg diff
175 diff -r 2443a0e66469 c
175 diff -r 2443a0e66469 c
176 --- a/c Thu Jan 01 00:00:00 1970 +0000
176 --- a/c Thu Jan 01 00:00:00 1970 +0000
177 +++ b/c Thu Jan 01 00:00:00 1970 +0000
177 +++ b/c Thu Jan 01 00:00:00 1970 +0000
178 @@ -1,2 +1,1 @@
178 @@ -1,2 +1,1 @@
179 foo
179 foo
180 -bar
180 -bar
181
181
182 Test special hash/rev
183
184 $ hg log -qr 'null:wdir() & 000000000000'
185 -1:000000000000
186 $ hg log -qr 'null:wdir() & ffffffffffff'
187 2147483647:ffffffffffff
188 $ hg log -qr 'null:wdir() & rev(-1)'
189 -1:000000000000
190 $ hg log -qr 'null:wdir() & rev(2147483647)'
191 2147483647:ffffffffffff
192 $ hg log -qr 'null:wdir() & 2147483647'
193 2147483647:ffffffffffff
194
182 Commands with undefined cmdtype should not work right now
195 Commands with undefined cmdtype should not work right now
183
196
184 $ hg phase -r 28ad74
197 $ hg phase -r 28ad74
185 abort: hidden revision '28ad74' was rewritten as: 2443a0e66469!
198 abort: hidden revision '28ad74' was rewritten as: 2443a0e66469!
186 (use --hidden to access hidden revisions)
199 (use --hidden to access hidden revisions)
187 [255]
200 [255]
188
201
189 $ hg phase -r 2
202 $ hg phase -r 2
190 abort: hidden revision '2' was rewritten as: 2443a0e66469!
203 abort: hidden revision '2' was rewritten as: 2443a0e66469!
191 (use --hidden to access hidden revisions)
204 (use --hidden to access hidden revisions)
192 [255]
205 [255]
193
206
194 Setting a bookmark will make that changeset unhidden, so this should come in end
207 Setting a bookmark will make that changeset unhidden, so this should come in end
195
208
196 $ hg bookmarks -r 28ad74 book
209 $ hg bookmarks -r 28ad74 book
197 bookmarking hidden changeset 28ad74487de9
210 bookmarking hidden changeset 28ad74487de9
198 (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
211 (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
199
212
200 $ hg bookmarks
213 $ hg bookmarks
201 book 2:28ad74487de9
214 book 2:28ad74487de9
General Comments 0
You need to be logged in to leave comments. Login now