##// END OF EJS Templates
extdatasource: use revsymbol() for converting to node...
Martin von Zweigbergk -
r37378:d0d55980 default
parent child Browse files
Show More
@@ -1,1460 +1,1460 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 from .utils import (
44 from .utils import (
45 procutil,
45 procutil,
46 stringutil,
46 stringutil,
47 )
47 )
48
48
49 if pycompat.iswindows:
49 if pycompat.iswindows:
50 from . import scmwindows as scmplatform
50 from . import scmwindows as scmplatform
51 else:
51 else:
52 from . import scmposix as scmplatform
52 from . import scmposix as scmplatform
53
53
54 termsize = scmplatform.termsize
54 termsize = scmplatform.termsize
55
55
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # store the seven categories positionally; properties below name them
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
108
108
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    # Subrepos present only in ctx2 are removed from the mapping and
    # remembered so a placeholder can be yielded for them below.
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
133
133
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    # Count excluded nodes that are secret and not extinct; those explain
    # why "no changes" may be surprising to the user.
    for node in excluded or []:
        ctx = repo[node]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(node)

    if not secretlist:
        ui.status(_("no changes found\n"))
        return
    ui.status(_("no changes found (ignored %d secret changesets)\n")
              % len(secretlist))
150
150
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print a traceback (if enabled) before the outer handlers
            # reduce the exception to a message and exit code
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # normalize the payload to bytes before deciding how to show it
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # 1, not -1: the user must act, but this is not an internal failure
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # NOTE(review): the attribute probing below distinguishes HTTP-style
        # errors (have .code) from URL/SSL-style errors (have .reason) —
        # presumably urllib2/ssl exception shapes; confirm against callers.
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe: the reader went away; nothing useful to report
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
267
267
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new label name.

    Note: the "kind" parameter is deliberately not used in ui output,
    because that would make the strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(forbidden))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # the label parsed as an integer, which would shadow revision numbers
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
284
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289
289
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
301
301
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    setting = ui.config('ui', 'portablefilenames')
    lowered = setting.lower()
    asbool = stringutil.parsebool(setting)
    # Windows always aborts on non-portable names; elsewhere honor the config
    abort = pycompat.iswindows or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % setting)
    return abort, warn
314
314
class casecollisionauditor(object):
    """Detect filenames that case-fold to an already-tracked name.

    Instances are callable; call one with each filename about to be added.
    Depending on the 'abort' flag, a collision raises error.Abort or emits
    a warning on ui.
    """
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort  # True: raise on collision; False: warn only
        # Lowercase every tracked name in a single encoding.lower() call by
        # joining them on NUL (NUL cannot appear in a tracked filename).
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # A collision only counts if f itself isn't already tracked (an
        # already-tracked name trivially matches its own lowered form).
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
338
338
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None when the view filters nothing, or when no filtered
    revision is <= maxrev.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # feed explicit bytes: hashlib.update() rejects text on Python 3,
            # and b'%d;' is byte-identical to '%d;' on Python 2
            s.update(b'%d;' % rev)
        key = s.digest()
    return key
362
362
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def raiseonroot(err):
        # only errors on the starting path itself are fatal
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat if unseen; return True when it was new
            dirstat = os.stat(dirname)
            if any(samestat(dirstat, seenstat) for seenstat in dirlst):
                return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect symlink cycles safely
        followsym = False

    if seen_dirs is None and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=raiseonroot):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            keep = []
            for subdir in dirs:
                fullpath = os.path.join(root, subdir)
                if adddir(seen_dirs, fullpath):
                    if os.path.islink(fullpath):
                        # walk through the symlink separately, sharing the
                        # seen set to avoid revisiting directories
                        for found in walkrepos(fullpath, True, seen_dirs):
                            yield found
                    else:
                        keep.append(subdir)
            dirs[:] = keep
406
406
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is not None:
        return node
    # the working directory context has no real node; use the sentinel
    return wdirid
413
413
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is not None:
        return rev
    # the working directory context has no revision number; use the sentinel
    return wdirrev
421
421
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
427
427
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # debug mode shows the full hash, normal mode the short form
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
435
435
def isrevsymbol(repo, symbol):
    """Report whether revsymbol() can resolve symbol in repo."""
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
442
442
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if isinstance(symbol, bytes):
        return repo[symbol]
    # passing a non-bytes here is a caller bug, not a user error
    msg = ("symbol (%s of type %s) was not a string, did you mean "
           "repo[symbol]?" % (symbol, type(symbol)))
    raise error.ProgrammingError(msg)
455
455
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single context, falling back to repo[default].

    The fallback applies when revspec is empty/None; 0 is a valid revspec.
    An empty revset result raises Abort.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
464
464
def _pairspec(revspec):
    """Report whether revspec parses to a top-level range expression.

    revpair() uses this to force a pair result for explicit ranges even
    when both endpoints resolve to the same revision.
    """
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
468
468
def revpairnodes(repo, revs):
    """Deprecated: like revpair(), but returns binary nodes, not contexts."""
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    ctx1, ctx2 = revpair(repo, revs)
    return ctx1.node(), ctx2.node()
473
473
def revpair(repo, revs):
    """Resolve a list of revset specs to a pair of contexts.

    With no specs, the pair is (working copy parent, working copy). When
    the specs collapse to one revision that was not written as an explicit
    range, the second context is the working copy (repo[None]).
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # pick the endpoints according to the set's known ordering, if any
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # endpoints equal with multiple specs: reject if any individual spec
    # resolved to nothing (e.g. "x..y" where one side is empty)
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
503
503
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are revision numbers; wrap them in rev() for the revset
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
531
531
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        # a merge: both parents matter
        return ps
    if repo.ui.debugflag:
        # debug output always shows the (null) second parent as well
        return [ps[0], repo['null']]
    # hide the parent when it is just the immediately preceding revision
    return [] if ps[0].rev() >= intrev(ctx) - 1 else ps
547
547
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit pattern kind is passed through untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            # an unparsable pattern is treated as a literal name
            matches = [pat]
        # if the glob matched nothing, keep the original pattern
        expanded.extend(matches or [kindpat])
    return expanded
566
566
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if pats == ("",):
        # a lone empty pattern means "no patterns"
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is bound below, before the matcher can ever invoke this
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'),
                  badfn=bad if badfn is None else badfn)

    if m.always():
        # an always-matcher needs no patterns at all
        pats = []
    return m, pats
591
591
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
596
596
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
600
600
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
604
604
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError (with ``msg``) unless the pattern selects exactly
    one file in revision ``rev``.
    """
    if not matchmod.patkind(pat):
        # a plain path: just normalize it relative to the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    # followlines only makes sense for a single file
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
618
618
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path where the backup for ``filepath`` should be
    written.  As a side effect, files or directories inside the configured
    backup area that would collide with that path are removed.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        # no dedicated backup area configured: back up next to the file
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the deepest existing ancestor can be a file; once it
                # is gone, the rest of the path can be created as directories
                break

        origvfs.makedirs(origbackupdir)

    # a directory sitting where the backup file must go also has to give way
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
654
654
655 class _containsnode(object):
655 class _containsnode(object):
656 """proxy __contains__(node) to container.__contains__ which accepts revs"""
656 """proxy __contains__(node) to container.__contains__ which accepts revs"""
657
657
658 def __init__(self, repo, revcontainer):
658 def __init__(self, repo, revcontainer):
659 self._torev = repo.changelog.rev
659 self._torev = repo.changelog.rev
660 self._revcontains = revcontainer.__contains__
660 self._revcontains = revcontainer.__contains__
661
661
662 def __contains__(self, node):
662 def __contains__(self, node):
663 return self._revcontains(self._torev(node))
663 return self._revcontains(self._torev(node))
664
664
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        # nothing to do at all; avoid opening a transaction
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # explicit entries in 'moves' take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # (bookmark, None) means "delete this bookmark"
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            # without obsolescence support, replaced nodes must be stripped
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
758
758
def addremove(repo, matcher, prefix, opts=None):
    """schedule files matched by ``matcher`` for addition or removal

    Unknown files are marked for addition and missing ones for removal;
    when a ``similarity`` option (0-100) is given, removals that closely
    match additions are recorded as renames.  With ``dry_run`` set, the
    dirstate is left untouched.  Recurses into subrepos when requested.

    Returns 1 when any explicitly named pattern was rejected or a subrepo
    addremove failed, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # _findrenames expects a 0.0-1.0 ratio, not a percentage
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # handle subrepos first, so their results are reported before ours
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only complain about patterns the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report additions (unknown/forgotten) and removals (deleted) to the user
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
818
818
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Like addremove(), but for an explicit list of files: unknown ones are
    added, missing ones removed, and close add/remove pairs recorded as
    renames when ``similarity`` (0.0-1.0) is positive.  Returns 1 when any
    of the named files was rejected by the matcher, 0 otherwise.
    '''
    # the lambda closes over 'rejected' by name; it is bound on the next
    # line, before the matcher can ever invoke the callback
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
847
847
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of file lists:
    (added, unknown, deleted, removed, forgotten).'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dstate is the single-letter dirstate status:
        # '?' untracked, 'r' removed, 'a' added; st is the walk's stat
        # result, falsy when the file is absent from disk (per dirstate.walk
        # — confirm against dirstate.walk's contract)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but still present: it was "forgotten"
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
876
876
877 def _findrenames(repo, matcher, added, removed, similarity):
877 def _findrenames(repo, matcher, added, removed, similarity):
878 '''Find renames from removed files to added ones.'''
878 '''Find renames from removed files to added ones.'''
879 renames = {}
879 renames = {}
880 if similarity > 0:
880 if similarity > 0:
881 for old, new, score in similar.findrenames(repo, added, removed,
881 for old, new, score in similar.findrenames(repo, added, removed,
882 similarity):
882 similarity):
883 if (repo.ui.verbose or not matcher.exact(old)
883 if (repo.ui.verbose or not matcher.exact(old)
884 or not matcher.exact(new)):
884 or not matcher.exact(new)):
885 repo.ui.status(_('recording removal of %s as rename to %s '
885 repo.ui.status(_('recording removal of %s as rename to %s '
886 '(%d%% similar)\n') %
886 '(%d%% similar)\n') %
887 (matcher.rel(old), matcher.rel(new),
887 (matcher.rel(old), matcher.rel(new),
888 score * 100))
888 score * 100))
889 renames[new] = old
889 renames[new] = old
890 return renames
890 return renames
891
891
892 def _markchanges(repo, unknown, deleted, renames):
892 def _markchanges(repo, unknown, deleted, renames):
893 '''Marks the files in unknown as added, the files in deleted as removed,
893 '''Marks the files in unknown as added, the files in deleted as removed,
894 and the files in renames as copied.'''
894 and the files in renames as copied.'''
895 wctx = repo[None]
895 wctx = repo[None]
896 with repo.wlock():
896 with repo.wlock():
897 wctx.forget(deleted)
897 wctx.forget(deleted)
898 wctx.add(unknown)
898 wctx.add(unknown)
899 for new, old in renames.iteritems():
899 for new, old in renames.iteritems():
900 wctx.copy(old, new)
900 wctx.copy(old, new)
901
901
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    ds = repo.dirstate
    origsrc = ds.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if ds[dst] not in 'mn' and not dryrun:
            ds.normallookup(dst)
        return
    if ds[origsrc] == 'a' and origsrc == src:
        # copying an uncommitted add: warn and just add the destination
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if ds[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
920
920
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements read; raises error.RequirementError
    when an entry is malformed or not in ``supported``.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a requirement that does not even look like a feature name means
        # the file itself is broken, not merely too new for us
        if not r or not r[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
939
939
def writerequires(opener, requirements):
    """Write ``requirements`` to the 'requires' file, one per line, sorted."""
    lines = ["%s\n" % r for r in sorted(requirements)]
    with opener('requires', 'w') as fp:
        for line in lines:
            fp.write(line)
944
944
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    Records the file's stat info at creation (when ``stat`` is true) and
    reports through changed() whether the file has changed since the last
    refresh().  Paths whose stat info is unreliable (per
    util.cachestat.cacheable()) always report as changed.
    """
    def __init__(self, path, stat):
        # path: file to track; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        # tri-state: True/False once known, None while undetermined
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info; pointless when the path is not cacheable
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the file changed since the recorded stat."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns util.cachestat for path, or None if the file is missing;
        # other OS errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
999
999
1000 class filecacheentry(object):
1000 class filecacheentry(object):
1001 def __init__(self, paths, stat=True):
1001 def __init__(self, paths, stat=True):
1002 self._entries = []
1002 self._entries = []
1003 for path in paths:
1003 for path in paths:
1004 self._entries.append(filecachesubentry(path, stat))
1004 self._entries.append(filecachesubentry(path, stat))
1005
1005
1006 def changed(self):
1006 def changed(self):
1007 '''true if any entry has changed'''
1007 '''true if any entry has changed'''
1008 for entry in self._entries:
1008 for entry in self._entries:
1009 if entry.changed():
1009 if entry.changed():
1010 return True
1010 return True
1011 return False
1011 return False
1012
1012
1013 def refresh(self):
1013 def refresh(self):
1014 for entry in self._entries:
1014 for entry in self._entries:
1015 entry.refresh()
1015 entry.refresh()
1016
1016
1017 class filecache(object):
1017 class filecache(object):
1018 '''A property like decorator that tracks files under .hg/ for updates.
1018 '''A property like decorator that tracks files under .hg/ for updates.
1019
1019
1020 Records stat info when called in _filecache.
1020 Records stat info when called in _filecache.
1021
1021
1022 On subsequent calls, compares old stat info with new info, and recreates the
1022 On subsequent calls, compares old stat info with new info, and recreates the
1023 object when any of the files changes, updating the new stat info in
1023 object when any of the files changes, updating the new stat info in
1024 _filecache.
1024 _filecache.
1025
1025
1026 Mercurial either atomic renames or appends for files under .hg,
1026 Mercurial either atomic renames or appends for files under .hg,
1027 so to ensure the cache is reliable we need the filesystem to be able
1027 so to ensure the cache is reliable we need the filesystem to be able
1028 to tell us if a file has been replaced. If it can't, we fallback to
1028 to tell us if a file has been replaced. If it can't, we fallback to
1029 recreating the object on every call (essentially the same behavior as
1029 recreating the object on every call (essentially the same behavior as
1030 propertycache).
1030 propertycache).
1031
1031
1032 '''
1032 '''
1033 def __init__(self, *paths):
1033 def __init__(self, *paths):
1034 self.paths = paths
1034 self.paths = paths
1035
1035
1036 def join(self, obj, fname):
1036 def join(self, obj, fname):
1037 """Used to compute the runtime path of a cached file.
1037 """Used to compute the runtime path of a cached file.
1038
1038
1039 Users should subclass filecache and provide their own version of this
1039 Users should subclass filecache and provide their own version of this
1040 function to call the appropriate join function on 'obj' (an instance
1040 function to call the appropriate join function on 'obj' (an instance
1041 of the class that its member function was decorated).
1041 of the class that its member function was decorated).
1042 """
1042 """
1043 raise NotImplementedError
1043 raise NotImplementedError
1044
1044
1045 def __call__(self, func):
1045 def __call__(self, func):
1046 self.func = func
1046 self.func = func
1047 self.name = func.__name__.encode('ascii')
1047 self.name = func.__name__.encode('ascii')
1048 return self
1048 return self
1049
1049
1050 def __get__(self, obj, type=None):
1050 def __get__(self, obj, type=None):
1051 # if accessed on the class, return the descriptor itself.
1051 # if accessed on the class, return the descriptor itself.
1052 if obj is None:
1052 if obj is None:
1053 return self
1053 return self
1054 # do we need to check if the file changed?
1054 # do we need to check if the file changed?
1055 if self.name in obj.__dict__:
1055 if self.name in obj.__dict__:
1056 assert self.name in obj._filecache, self.name
1056 assert self.name in obj._filecache, self.name
1057 return obj.__dict__[self.name]
1057 return obj.__dict__[self.name]
1058
1058
1059 entry = obj._filecache.get(self.name)
1059 entry = obj._filecache.get(self.name)
1060
1060
1061 if entry:
1061 if entry:
1062 if entry.changed():
1062 if entry.changed():
1063 entry.obj = self.func(obj)
1063 entry.obj = self.func(obj)
1064 else:
1064 else:
1065 paths = [self.join(obj, path) for path in self.paths]
1065 paths = [self.join(obj, path) for path in self.paths]
1066
1066
1067 # We stat -before- creating the object so our cache doesn't lie if
1067 # We stat -before- creating the object so our cache doesn't lie if
1068 # a writer modified between the time we read and stat
1068 # a writer modified between the time we read and stat
1069 entry = filecacheentry(paths, True)
1069 entry = filecacheentry(paths, True)
1070 entry.obj = self.func(obj)
1070 entry.obj = self.func(obj)
1071
1071
1072 obj._filecache[self.name] = entry
1072 obj._filecache[self.name] = entry
1073
1073
1074 obj.__dict__[self.name] = entry.obj
1074 obj.__dict__[self.name] = entry.obj
1075 return entry.obj
1075 return entry.obj
1076
1076
1077 def __set__(self, obj, value):
1077 def __set__(self, obj, value):
1078 if self.name not in obj._filecache:
1078 if self.name not in obj._filecache:
1079 # we add an entry for the missing value because X in __dict__
1079 # we add an entry for the missing value because X in __dict__
1080 # implies X in _filecache
1080 # implies X in _filecache
1081 paths = [self.join(obj, path) for path in self.paths]
1081 paths = [self.join(obj, path) for path in self.paths]
1082 ce = filecacheentry(paths, False)
1082 ce = filecacheentry(paths, False)
1083 obj._filecache[self.name] = ce
1083 obj._filecache[self.name] = ce
1084 else:
1084 else:
1085 ce = obj._filecache[self.name]
1085 ce = obj._filecache[self.name]
1086
1086
1087 ce.obj = value # update cached copy
1087 ce.obj = value # update cached copy
1088 obj.__dict__[self.name] = value # update copy returned by obj.x
1088 obj.__dict__[self.name] = value # update copy returned by obj.x
1089
1089
1090 def __delete__(self, obj):
1090 def __delete__(self, obj):
1091 try:
1091 try:
1092 del obj.__dict__[self.name]
1092 del obj.__dict__[self.name]
1093 except KeyError:
1093 except KeyError:
1094 raise AttributeError(self.name)
1094 raise AttributeError(self.name)
1095
1095
1096 def extdatasource(repo, source):
1096 def extdatasource(repo, source):
1097 """Gather a map of rev -> value dict from the specified source
1097 """Gather a map of rev -> value dict from the specified source
1098
1098
1099 A source spec is treated as a URL, with a special case shell: type
1099 A source spec is treated as a URL, with a special case shell: type
1100 for parsing the output from a shell command.
1100 for parsing the output from a shell command.
1101
1101
1102 The data is parsed as a series of newline-separated records where
1102 The data is parsed as a series of newline-separated records where
1103 each record is a revision specifier optionally followed by a space
1103 each record is a revision specifier optionally followed by a space
1104 and a freeform string value. If the revision is known locally, it
1104 and a freeform string value. If the revision is known locally, it
1105 is converted to a rev, otherwise the record is skipped.
1105 is converted to a rev, otherwise the record is skipped.
1106
1106
1107 Note that both key and value are treated as UTF-8 and converted to
1107 Note that both key and value are treated as UTF-8 and converted to
1108 the local encoding. This allows uniformity between local and
1108 the local encoding. This allows uniformity between local and
1109 remote data sources.
1109 remote data sources.
1110 """
1110 """
1111
1111
1112 spec = repo.ui.config("extdata", source)
1112 spec = repo.ui.config("extdata", source)
1113 if not spec:
1113 if not spec:
1114 raise error.Abort(_("unknown extdata source '%s'") % source)
1114 raise error.Abort(_("unknown extdata source '%s'") % source)
1115
1115
1116 data = {}
1116 data = {}
1117 src = proc = None
1117 src = proc = None
1118 try:
1118 try:
1119 if spec.startswith("shell:"):
1119 if spec.startswith("shell:"):
1120 # external commands should be run relative to the repo root
1120 # external commands should be run relative to the repo root
1121 cmd = spec[6:]
1121 cmd = spec[6:]
1122 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1122 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1123 close_fds=procutil.closefds,
1123 close_fds=procutil.closefds,
1124 stdout=subprocess.PIPE, cwd=repo.root)
1124 stdout=subprocess.PIPE, cwd=repo.root)
1125 src = proc.stdout
1125 src = proc.stdout
1126 else:
1126 else:
1127 # treat as a URL or file
1127 # treat as a URL or file
1128 src = url.open(repo.ui, spec)
1128 src = url.open(repo.ui, spec)
1129 for l in src:
1129 for l in src:
1130 if " " in l:
1130 if " " in l:
1131 k, v = l.strip().split(" ", 1)
1131 k, v = l.strip().split(" ", 1)
1132 else:
1132 else:
1133 k, v = l.strip(), ""
1133 k, v = l.strip(), ""
1134
1134
1135 k = encoding.tolocal(k)
1135 k = encoding.tolocal(k)
1136 try:
1136 try:
1137 data[repo[k].rev()] = encoding.tolocal(v)
1137 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1138 except (error.LookupError, error.RepoLookupError):
1138 except (error.LookupError, error.RepoLookupError):
1139 pass # we ignore data for nodes that don't exist locally
1139 pass # we ignore data for nodes that don't exist locally
1140 finally:
1140 finally:
1141 if proc:
1141 if proc:
1142 proc.communicate()
1142 proc.communicate()
1143 if src:
1143 if src:
1144 src.close()
1144 src.close()
1145 if proc and proc.returncode != 0:
1145 if proc and proc.returncode != 0:
1146 raise error.Abort(_("extdata command '%s' failed: %s")
1146 raise error.Abort(_("extdata command '%s' failed: %s")
1147 % (cmd, procutil.explainexit(proc.returncode)[0]))
1147 % (cmd, procutil.explainexit(proc.returncode)[0]))
1148
1148
1149 return data
1149 return data
1150
1150
1151 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1151 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1152 if lock is None:
1152 if lock is None:
1153 raise error.LockInheritanceContractViolation(
1153 raise error.LockInheritanceContractViolation(
1154 'lock can only be inherited while held')
1154 'lock can only be inherited while held')
1155 if environ is None:
1155 if environ is None:
1156 environ = {}
1156 environ = {}
1157 with lock.inherit() as locker:
1157 with lock.inherit() as locker:
1158 environ[envvar] = locker
1158 environ[envvar] = locker
1159 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1159 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1160
1160
1161 def wlocksub(repo, cmd, *args, **kwargs):
1161 def wlocksub(repo, cmd, *args, **kwargs):
1162 """run cmd as a subprocess that allows inheriting repo's wlock
1162 """run cmd as a subprocess that allows inheriting repo's wlock
1163
1163
1164 This can only be called while the wlock is held. This takes all the
1164 This can only be called while the wlock is held. This takes all the
1165 arguments that ui.system does, and returns the exit code of the
1165 arguments that ui.system does, and returns the exit code of the
1166 subprocess."""
1166 subprocess."""
1167 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1167 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1168 **kwargs)
1168 **kwargs)
1169
1169
1170 def gdinitconfig(ui):
1170 def gdinitconfig(ui):
1171 """helper function to know if a repo should be created as general delta
1171 """helper function to know if a repo should be created as general delta
1172 """
1172 """
1173 # experimental config: format.generaldelta
1173 # experimental config: format.generaldelta
1174 return (ui.configbool('format', 'generaldelta')
1174 return (ui.configbool('format', 'generaldelta')
1175 or ui.configbool('format', 'usegeneraldelta'))
1175 or ui.configbool('format', 'usegeneraldelta'))
1176
1176
1177 def gddeltaconfig(ui):
1177 def gddeltaconfig(ui):
1178 """helper function to know if incoming delta should be optimised
1178 """helper function to know if incoming delta should be optimised
1179 """
1179 """
1180 # experimental config: format.generaldelta
1180 # experimental config: format.generaldelta
1181 return ui.configbool('format', 'generaldelta')
1181 return ui.configbool('format', 'generaldelta')
1182
1182
1183 class simplekeyvaluefile(object):
1183 class simplekeyvaluefile(object):
1184 """A simple file with key=value lines
1184 """A simple file with key=value lines
1185
1185
1186 Keys must be alphanumerics and start with a letter, values must not
1186 Keys must be alphanumerics and start with a letter, values must not
1187 contain '\n' characters"""
1187 contain '\n' characters"""
1188 firstlinekey = '__firstline'
1188 firstlinekey = '__firstline'
1189
1189
1190 def __init__(self, vfs, path, keys=None):
1190 def __init__(self, vfs, path, keys=None):
1191 self.vfs = vfs
1191 self.vfs = vfs
1192 self.path = path
1192 self.path = path
1193
1193
1194 def read(self, firstlinenonkeyval=False):
1194 def read(self, firstlinenonkeyval=False):
1195 """Read the contents of a simple key-value file
1195 """Read the contents of a simple key-value file
1196
1196
1197 'firstlinenonkeyval' indicates whether the first line of file should
1197 'firstlinenonkeyval' indicates whether the first line of file should
1198 be treated as a key-value pair or reuturned fully under the
1198 be treated as a key-value pair or reuturned fully under the
1199 __firstline key."""
1199 __firstline key."""
1200 lines = self.vfs.readlines(self.path)
1200 lines = self.vfs.readlines(self.path)
1201 d = {}
1201 d = {}
1202 if firstlinenonkeyval:
1202 if firstlinenonkeyval:
1203 if not lines:
1203 if not lines:
1204 e = _("empty simplekeyvalue file")
1204 e = _("empty simplekeyvalue file")
1205 raise error.CorruptedState(e)
1205 raise error.CorruptedState(e)
1206 # we don't want to include '\n' in the __firstline
1206 # we don't want to include '\n' in the __firstline
1207 d[self.firstlinekey] = lines[0][:-1]
1207 d[self.firstlinekey] = lines[0][:-1]
1208 del lines[0]
1208 del lines[0]
1209
1209
1210 try:
1210 try:
1211 # the 'if line.strip()' part prevents us from failing on empty
1211 # the 'if line.strip()' part prevents us from failing on empty
1212 # lines which only contain '\n' therefore are not skipped
1212 # lines which only contain '\n' therefore are not skipped
1213 # by 'if line'
1213 # by 'if line'
1214 updatedict = dict(line[:-1].split('=', 1) for line in lines
1214 updatedict = dict(line[:-1].split('=', 1) for line in lines
1215 if line.strip())
1215 if line.strip())
1216 if self.firstlinekey in updatedict:
1216 if self.firstlinekey in updatedict:
1217 e = _("%r can't be used as a key")
1217 e = _("%r can't be used as a key")
1218 raise error.CorruptedState(e % self.firstlinekey)
1218 raise error.CorruptedState(e % self.firstlinekey)
1219 d.update(updatedict)
1219 d.update(updatedict)
1220 except ValueError as e:
1220 except ValueError as e:
1221 raise error.CorruptedState(str(e))
1221 raise error.CorruptedState(str(e))
1222 return d
1222 return d
1223
1223
1224 def write(self, data, firstline=None):
1224 def write(self, data, firstline=None):
1225 """Write key=>value mapping to a file
1225 """Write key=>value mapping to a file
1226 data is a dict. Keys must be alphanumerical and start with a letter.
1226 data is a dict. Keys must be alphanumerical and start with a letter.
1227 Values must not contain newline characters.
1227 Values must not contain newline characters.
1228
1228
1229 If 'firstline' is not None, it is written to file before
1229 If 'firstline' is not None, it is written to file before
1230 everything else, as it is, not in a key=value form"""
1230 everything else, as it is, not in a key=value form"""
1231 lines = []
1231 lines = []
1232 if firstline is not None:
1232 if firstline is not None:
1233 lines.append('%s\n' % firstline)
1233 lines.append('%s\n' % firstline)
1234
1234
1235 for k, v in data.items():
1235 for k, v in data.items():
1236 if k == self.firstlinekey:
1236 if k == self.firstlinekey:
1237 e = "key name '%s' is reserved" % self.firstlinekey
1237 e = "key name '%s' is reserved" % self.firstlinekey
1238 raise error.ProgrammingError(e)
1238 raise error.ProgrammingError(e)
1239 if not k[0:1].isalpha():
1239 if not k[0:1].isalpha():
1240 e = "keys must start with a letter in a key-value file"
1240 e = "keys must start with a letter in a key-value file"
1241 raise error.ProgrammingError(e)
1241 raise error.ProgrammingError(e)
1242 if not k.isalnum():
1242 if not k.isalnum():
1243 e = "invalid key name in a simple key-value file"
1243 e = "invalid key name in a simple key-value file"
1244 raise error.ProgrammingError(e)
1244 raise error.ProgrammingError(e)
1245 if '\n' in v:
1245 if '\n' in v:
1246 e = "invalid value in a simple key-value file"
1246 e = "invalid value in a simple key-value file"
1247 raise error.ProgrammingError(e)
1247 raise error.ProgrammingError(e)
1248 lines.append("%s=%s\n" % (k, v))
1248 lines.append("%s=%s\n" % (k, v))
1249 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1249 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1250 fp.write(''.join(lines))
1250 fp.write(''.join(lines))
1251
1251
1252 _reportobsoletedsource = [
1252 _reportobsoletedsource = [
1253 'debugobsolete',
1253 'debugobsolete',
1254 'pull',
1254 'pull',
1255 'push',
1255 'push',
1256 'serve',
1256 'serve',
1257 'unbundle',
1257 'unbundle',
1258 ]
1258 ]
1259
1259
1260 _reportnewcssource = [
1260 _reportnewcssource = [
1261 'pull',
1261 'pull',
1262 'unbundle',
1262 'unbundle',
1263 ]
1263 ]
1264
1264
1265 # a list of (repo, ctx, files) functions called by various commands to allow
1265 # a list of (repo, ctx, files) functions called by various commands to allow
1266 # extensions to ensure the corresponding files are available locally, before the
1266 # extensions to ensure the corresponding files are available locally, before the
1267 # command uses them.
1267 # command uses them.
1268 fileprefetchhooks = util.hooks()
1268 fileprefetchhooks = util.hooks()
1269
1269
1270 # A marker that tells the evolve extension to suppress its own reporting
1270 # A marker that tells the evolve extension to suppress its own reporting
1271 _reportstroubledchangesets = True
1271 _reportstroubledchangesets = True
1272
1272
1273 def registersummarycallback(repo, otr, txnname=''):
1273 def registersummarycallback(repo, otr, txnname=''):
1274 """register a callback to issue a summary after the transaction is closed
1274 """register a callback to issue a summary after the transaction is closed
1275 """
1275 """
1276 def txmatch(sources):
1276 def txmatch(sources):
1277 return any(txnname.startswith(source) for source in sources)
1277 return any(txnname.startswith(source) for source in sources)
1278
1278
1279 categories = []
1279 categories = []
1280
1280
1281 def reportsummary(func):
1281 def reportsummary(func):
1282 """decorator for report callbacks."""
1282 """decorator for report callbacks."""
1283 # The repoview life cycle is shorter than the one of the actual
1283 # The repoview life cycle is shorter than the one of the actual
1284 # underlying repository. So the filtered object can die before the
1284 # underlying repository. So the filtered object can die before the
1285 # weakref is used leading to troubles. We keep a reference to the
1285 # weakref is used leading to troubles. We keep a reference to the
1286 # unfiltered object and restore the filtering when retrieving the
1286 # unfiltered object and restore the filtering when retrieving the
1287 # repository through the weakref.
1287 # repository through the weakref.
1288 filtername = repo.filtername
1288 filtername = repo.filtername
1289 reporef = weakref.ref(repo.unfiltered())
1289 reporef = weakref.ref(repo.unfiltered())
1290 def wrapped(tr):
1290 def wrapped(tr):
1291 repo = reporef()
1291 repo = reporef()
1292 if filtername:
1292 if filtername:
1293 repo = repo.filtered(filtername)
1293 repo = repo.filtered(filtername)
1294 func(repo, tr)
1294 func(repo, tr)
1295 newcat = '%02i-txnreport' % len(categories)
1295 newcat = '%02i-txnreport' % len(categories)
1296 otr.addpostclose(newcat, wrapped)
1296 otr.addpostclose(newcat, wrapped)
1297 categories.append(newcat)
1297 categories.append(newcat)
1298 return wrapped
1298 return wrapped
1299
1299
1300 if txmatch(_reportobsoletedsource):
1300 if txmatch(_reportobsoletedsource):
1301 @reportsummary
1301 @reportsummary
1302 def reportobsoleted(repo, tr):
1302 def reportobsoleted(repo, tr):
1303 obsoleted = obsutil.getobsoleted(repo, tr)
1303 obsoleted = obsutil.getobsoleted(repo, tr)
1304 if obsoleted:
1304 if obsoleted:
1305 repo.ui.status(_('obsoleted %i changesets\n')
1305 repo.ui.status(_('obsoleted %i changesets\n')
1306 % len(obsoleted))
1306 % len(obsoleted))
1307
1307
1308 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1308 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1309 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1309 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1310 instabilitytypes = [
1310 instabilitytypes = [
1311 ('orphan', 'orphan'),
1311 ('orphan', 'orphan'),
1312 ('phase-divergent', 'phasedivergent'),
1312 ('phase-divergent', 'phasedivergent'),
1313 ('content-divergent', 'contentdivergent'),
1313 ('content-divergent', 'contentdivergent'),
1314 ]
1314 ]
1315
1315
1316 def getinstabilitycounts(repo):
1316 def getinstabilitycounts(repo):
1317 filtered = repo.changelog.filteredrevs
1317 filtered = repo.changelog.filteredrevs
1318 counts = {}
1318 counts = {}
1319 for instability, revset in instabilitytypes:
1319 for instability, revset in instabilitytypes:
1320 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1320 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1321 filtered)
1321 filtered)
1322 return counts
1322 return counts
1323
1323
1324 oldinstabilitycounts = getinstabilitycounts(repo)
1324 oldinstabilitycounts = getinstabilitycounts(repo)
1325 @reportsummary
1325 @reportsummary
1326 def reportnewinstabilities(repo, tr):
1326 def reportnewinstabilities(repo, tr):
1327 newinstabilitycounts = getinstabilitycounts(repo)
1327 newinstabilitycounts = getinstabilitycounts(repo)
1328 for instability, revset in instabilitytypes:
1328 for instability, revset in instabilitytypes:
1329 delta = (newinstabilitycounts[instability] -
1329 delta = (newinstabilitycounts[instability] -
1330 oldinstabilitycounts[instability])
1330 oldinstabilitycounts[instability])
1331 if delta > 0:
1331 if delta > 0:
1332 repo.ui.warn(_('%i new %s changesets\n') %
1332 repo.ui.warn(_('%i new %s changesets\n') %
1333 (delta, instability))
1333 (delta, instability))
1334
1334
1335 if txmatch(_reportnewcssource):
1335 if txmatch(_reportnewcssource):
1336 @reportsummary
1336 @reportsummary
1337 def reportnewcs(repo, tr):
1337 def reportnewcs(repo, tr):
1338 """Report the range of new revisions pulled/unbundled."""
1338 """Report the range of new revisions pulled/unbundled."""
1339 newrevs = tr.changes.get('revs', xrange(0, 0))
1339 newrevs = tr.changes.get('revs', xrange(0, 0))
1340 if not newrevs:
1340 if not newrevs:
1341 return
1341 return
1342
1342
1343 # Compute the bounds of new revisions' range, excluding obsoletes.
1343 # Compute the bounds of new revisions' range, excluding obsoletes.
1344 unfi = repo.unfiltered()
1344 unfi = repo.unfiltered()
1345 revs = unfi.revs('%ld and not obsolete()', newrevs)
1345 revs = unfi.revs('%ld and not obsolete()', newrevs)
1346 if not revs:
1346 if not revs:
1347 # Got only obsoletes.
1347 # Got only obsoletes.
1348 return
1348 return
1349 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1349 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1350
1350
1351 if minrev == maxrev:
1351 if minrev == maxrev:
1352 revrange = minrev
1352 revrange = minrev
1353 else:
1353 else:
1354 revrange = '%s:%s' % (minrev, maxrev)
1354 revrange = '%s:%s' % (minrev, maxrev)
1355 repo.ui.status(_('new changesets %s\n') % revrange)
1355 repo.ui.status(_('new changesets %s\n') % revrange)
1356
1356
1357 def nodesummaries(repo, nodes, maxnumnodes=4):
1357 def nodesummaries(repo, nodes, maxnumnodes=4):
1358 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1358 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1359 return ' '.join(short(h) for h in nodes)
1359 return ' '.join(short(h) for h in nodes)
1360 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1360 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1361 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1361 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1362
1362
1363 def enforcesinglehead(repo, tr, desc):
1363 def enforcesinglehead(repo, tr, desc):
1364 """check that no named branch has multiple heads"""
1364 """check that no named branch has multiple heads"""
1365 if desc in ('strip', 'repair'):
1365 if desc in ('strip', 'repair'):
1366 # skip the logic during strip
1366 # skip the logic during strip
1367 return
1367 return
1368 visible = repo.filtered('visible')
1368 visible = repo.filtered('visible')
1369 # possible improvement: we could restrict the check to affected branch
1369 # possible improvement: we could restrict the check to affected branch
1370 for name, heads in visible.branchmap().iteritems():
1370 for name, heads in visible.branchmap().iteritems():
1371 if len(heads) > 1:
1371 if len(heads) > 1:
1372 msg = _('rejecting multiple heads on branch "%s"')
1372 msg = _('rejecting multiple heads on branch "%s"')
1373 msg %= name
1373 msg %= name
1374 hint = _('%d heads: %s')
1374 hint = _('%d heads: %s')
1375 hint %= (len(heads), nodesummaries(repo, heads))
1375 hint %= (len(heads), nodesummaries(repo, heads))
1376 raise error.Abort(msg, hint=hint)
1376 raise error.Abort(msg, hint=hint)
1377
1377
1378 def wrapconvertsink(sink):
1378 def wrapconvertsink(sink):
1379 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1379 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1380 before it is used, whether or not the convert extension was formally loaded.
1380 before it is used, whether or not the convert extension was formally loaded.
1381 """
1381 """
1382 return sink
1382 return sink
1383
1383
1384 def unhidehashlikerevs(repo, specs, hiddentype):
1384 def unhidehashlikerevs(repo, specs, hiddentype):
1385 """parse the user specs and unhide changesets whose hash or revision number
1385 """parse the user specs and unhide changesets whose hash or revision number
1386 is passed.
1386 is passed.
1387
1387
1388 hiddentype can be: 1) 'warn': warn while unhiding changesets
1388 hiddentype can be: 1) 'warn': warn while unhiding changesets
1389 2) 'nowarn': don't warn while unhiding changesets
1389 2) 'nowarn': don't warn while unhiding changesets
1390
1390
1391 returns a repo object with the required changesets unhidden
1391 returns a repo object with the required changesets unhidden
1392 """
1392 """
1393 if not repo.filtername or not repo.ui.configbool('experimental',
1393 if not repo.filtername or not repo.ui.configbool('experimental',
1394 'directaccess'):
1394 'directaccess'):
1395 return repo
1395 return repo
1396
1396
1397 if repo.filtername not in ('visible', 'visible-hidden'):
1397 if repo.filtername not in ('visible', 'visible-hidden'):
1398 return repo
1398 return repo
1399
1399
1400 symbols = set()
1400 symbols = set()
1401 for spec in specs:
1401 for spec in specs:
1402 try:
1402 try:
1403 tree = revsetlang.parse(spec)
1403 tree = revsetlang.parse(spec)
1404 except error.ParseError: # will be reported by scmutil.revrange()
1404 except error.ParseError: # will be reported by scmutil.revrange()
1405 continue
1405 continue
1406
1406
1407 symbols.update(revsetlang.gethashlikesymbols(tree))
1407 symbols.update(revsetlang.gethashlikesymbols(tree))
1408
1408
1409 if not symbols:
1409 if not symbols:
1410 return repo
1410 return repo
1411
1411
1412 revs = _getrevsfromsymbols(repo, symbols)
1412 revs = _getrevsfromsymbols(repo, symbols)
1413
1413
1414 if not revs:
1414 if not revs:
1415 return repo
1415 return repo
1416
1416
1417 if hiddentype == 'warn':
1417 if hiddentype == 'warn':
1418 unfi = repo.unfiltered()
1418 unfi = repo.unfiltered()
1419 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1419 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1420 repo.ui.warn(_("warning: accessing hidden changesets for write "
1420 repo.ui.warn(_("warning: accessing hidden changesets for write "
1421 "operation: %s\n") % revstr)
1421 "operation: %s\n") % revstr)
1422
1422
1423 # we have to use new filtername to separate branch/tags cache until we can
1423 # we have to use new filtername to separate branch/tags cache until we can
1424 # disbale these cache when revisions are dynamically pinned.
1424 # disbale these cache when revisions are dynamically pinned.
1425 return repo.filtered('visible-hidden', revs)
1425 return repo.filtered('visible-hidden', revs)
1426
1426
1427 def _getrevsfromsymbols(repo, symbols):
1427 def _getrevsfromsymbols(repo, symbols):
1428 """parse the list of symbols and returns a set of revision numbers of hidden
1428 """parse the list of symbols and returns a set of revision numbers of hidden
1429 changesets present in symbols"""
1429 changesets present in symbols"""
1430 revs = set()
1430 revs = set()
1431 unfi = repo.unfiltered()
1431 unfi = repo.unfiltered()
1432 unficl = unfi.changelog
1432 unficl = unfi.changelog
1433 cl = repo.changelog
1433 cl = repo.changelog
1434 tiprev = len(unficl)
1434 tiprev = len(unficl)
1435 pmatch = unficl._partialmatch
1435 pmatch = unficl._partialmatch
1436 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1436 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1437 for s in symbols:
1437 for s in symbols:
1438 try:
1438 try:
1439 n = int(s)
1439 n = int(s)
1440 if n <= tiprev:
1440 if n <= tiprev:
1441 if not allowrevnums:
1441 if not allowrevnums:
1442 continue
1442 continue
1443 else:
1443 else:
1444 if n not in cl:
1444 if n not in cl:
1445 revs.add(n)
1445 revs.add(n)
1446 continue
1446 continue
1447 except ValueError:
1447 except ValueError:
1448 pass
1448 pass
1449
1449
1450 try:
1450 try:
1451 s = pmatch(s)
1451 s = pmatch(s)
1452 except (error.LookupError, error.WdirUnsupported):
1452 except (error.LookupError, error.WdirUnsupported):
1453 s = None
1453 s = None
1454
1454
1455 if s is not None:
1455 if s is not None:
1456 rev = unficl.rev(s)
1456 rev = unficl.rev(s)
1457 if rev not in cl:
1457 if rev not in cl:
1458 revs.add(rev)
1458 revs.add(rev)
1459
1459
1460 return revs
1460 return revs
General Comments 0
You need to be logged in to leave comments. Login now