##// END OF EJS Templates
scmutil: fix oversight in b76248e51605c6 where I forgot to use msg...
Augie Fackler -
r36713:c442c4a9 default
parent child Browse files
Show More
@@ -1,1425 +1,1425 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
# Select the platform-specific SCM helper implementation once at import
# time; the rest of this module only refers to it via the ``scmplatform``
# alias, so posix/windows differences stay contained here.
if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

# Re-exported convenience alias: terminal-size query from the platform module.
termsize = scmplatform.termsize
50
50
class status(tuple):
    '''Named tuple with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant to
    the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Fixed slot order; the property accessors below index into it.
        fields = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files tracked in the dirstate but deleted from the working copy
        (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
103
103
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping preferring ctx1's entries: the .hgsub
    # file may have been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subpaths present only in ctx2 get special treatment below.
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
128
128
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        # Count only live secret changesets; extinct ones are irrelevant.
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
145
145
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    Returns func()'s result on success; for handled errors a message is
    printed and an exit code is returned: 1 for InterventionRequired, the
    original code for SystemExit, and -1 for everything else handled here.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Log the traceback (when ui.traceback is enabled) before the
            # outer handlers turn the exception into a message + exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or util.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or util.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # args[1] may arrive as unicode, bytes, or something else entirely;
        # normalize to bytes when possible before deciding how to print it.
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # 1 (not -1): the command needs user action, it did not crash.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % util.forcebytestr(inst))
        # The last word of the message is the module that failed to import;
        # give a targeted hint for known C-extension modules.
        m = util.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like object
            ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            # URLError-like object
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe is expected when e.g. the pager exits; stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    util.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # not one of the recognized IOError shapes — let it propagate
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                util.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % util.forcebytestr(inst.args[-1]))

    return -1
262
262
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not usable as a new label (bookmark/branch/...) name.

    The "kind" parameter is deliberately kept out of the ui output: using it
    in the messages would make the strings difficult to translate.
    '''
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # the label parsed cleanly as an integer, which would be ambiguous
        # with revision numbers
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
279
279
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
284
284
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
296
296
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.
    '''
    setting = ui.config('ui', 'portablefilenames')
    lowered = setting.lower()
    parsed = util.parsebool(setting)
    # Windows always aborts: non-portable names cannot exist there at all.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    if parsed is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % setting)
    return abort, warn
309
309
class casecollisionauditor(object):
    '''Detect case-folding collisions between newly added and tracked files.

    Calling the instance with a filename warns or aborts (per the ``abort``
    flag) when the name collides case-insensitively with a tracked file.
    '''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Case-fold every tracked filename once up front so each later
        # audit is a constant-time set lookup.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # Names already audited; rechecking the same filename twice must
        # not produce a spurious collision report.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
333
333
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view up to maxrev
    and returns that SHA-1 digest, or None when nothing is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
357
357
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; failures deeper in
        # the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    # os.path.samestat may be missing on some platforms; symlink-following
    # depends on it for cycle detection.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst; returns True when it was not
            # already present (i.e. this directory is new to the walk).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # sort in-place so the traversal (and yield) order is deterministic
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                # adddir returns False for already-seen dirs, breaking
                # symlink cycles.
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target recursively; os.walk itself
                        # is not asked to follow links
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
401
401
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; substitute wdirid.
    n = ctx.node()
    if n is not None:
        return n
    return wdirid
408
408
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has no real revision; use wdirrev so
    # callers can still compare/do arithmetic.
    r = ctx.rev()
    if r is not None:
        return r
    return wdirrev
416
416
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
422
422
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hex node in debug mode, abbreviated otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
430
430
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve revspec to a single changectx, falling back to default.

    Note: 0 is a valid revision even though it is falsy.
    '''
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
439
439
def _pairspec(revspec):
    '''Report whether revspec's top-level operator is a range expression.'''
    parsed = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return parsed and parsed[0] in rangeops
443
443
def revpair(repo, revs):
    '''Resolve revs to a (first, second) pair of nodes.

    second is None when revs names a single revision (and is not a range
    expression); with no revs at all the working directory's first parent
    is returned.
    '''
    if not revs:
        return repo.dirstate.p1(), None

    matched = revrange(repo, revs)

    # Pull the endpoints out without forcing a full sort when the smartset
    # already knows its ordering.
    if not matched:
        first = second = None
    elif matched.isascending():
        first, second = matched.min(), matched.max()
    elif matched.isdescending():
        first, second = matched.max(), matched.min()
    else:
        first, second = matched.first(), matched.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
473
473
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare revision numbers get wrapped in rev(); everything else is
    # assumed to already be a formatted revset string.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
501
501
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    # merge: both parents matter
    if len(ps) > 1:
        return ps
    # debug mode shows everything, padding with the null revision
    if repo.ui.debugflag:
        return [ps[0], repo['null']]
    # a linear parent immediately preceding ctx carries no information
    if ps[0].rev() >= intrev(ctx) - 1:
        return []
    return ps
517
517
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume the shell has already done the expansion.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind ("glob:", "re:", ...): pass through as-is
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # nothing on disk matched; keep the original pattern
            expanded.append(kindpat)
    return expanded
536
536
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a lone empty pattern means "no patterns"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (no-op on posix, see expandpats)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: 'm' is bound below; this closure is only invoked after
        # ctx.match() has returned, so the late binding is safe.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # match-everything matcher: report no patterns to callers
        pats = []
    return m, pats
561
561
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.

    Thin wrapper around matchandpats() that discards the normalized
    pattern list and returns only the matcher.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
566
566
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # the 'always' matcher short-circuits all pattern tests
    return matchmod.always(repo.root, repo.getcwd())
570
570
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # exact matcher: no pattern interpretation, literal file list only
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
574
574
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    A plain path is simply canonicalized.  A real pattern is matched against
    the files of `rev` and must resolve to exactly one file, otherwise
    error.ParseError(msg) is raised.
    """
    if not matchmod.patkind(pat):
        # no pattern kind: treat it as a literal path
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    if len(matched) == 1:
        return matched[0]
    raise error.ParseError(msg)
588
588
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (only the first conflicting ancestor needs removing: everything
        # below it cannot exist once it is gone)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # a directory (but not a symlink) at the target path would shadow the
    # backup file; clear it out first
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
624
624
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        # cache the bound lookups once so each membership test is cheap
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        rev = self._torev(node)
        return self._revcontains(rev)
634
634
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # an explicit entry in 'moves' overrides the computed destination
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # (bookmark, None) means "delete this bookmark"
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence disabled: fall back to stripping the old nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
727
727
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and forget missing files matched by 'matcher'.

    Recurses into subrepositories when requested (or when the matcher
    names them), reports what is being added/removed, detects
    similarity-based renames, and records everything in the dirstate
    unless 'dry_run' is set.

    Returns 1 if any explicitly listed file was rejected (or a subrepo
    reported failure), 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # recurse into subrepos first
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only files the user named explicitly count as rejections
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
783
783
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # badfn closes over 'rejected'; it is only called later, during the walk
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report what will be added (unknown/forgotten) vs removed (deleted)
        newly = set(unknown + forgotten)
        toprint = newly | set(deleted)
        for abs in sorted(toprint):
            if abs in newly:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
812
812
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dirstate states: '?' untracked, 'r' removed, 'a' added
        # (st is the file's stat result, falsy when missing on disk)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and path passes the audit: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk: candidate for removal
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
841
841
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns {new: old} for every detected pair; empty when similarity
    detection is disabled (similarity <= 0).'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # stay quiet only when both ends were named explicitly and we are
        # not in verbose mode
        exactpair = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not exactpair:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
856
856
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all three dirstate mutations happen under one working-copy lock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
866
866
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain: if src was itself copied, credit the
    # original source instead
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # dirstate states: 'm' merged, 'n' normal, 'a' added, 'r' removed,
        # '?' untracked
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added in the working copy, so there is no
            # committed revision to record copy metadata against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
885
885
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the full set of requirements on success; raises
    error.RequirementError when the file is corrupt or mentions features
    this Mercurial does not support.'''
    requirements = set(opener.read("requires").splitlines())
    unknown = []
    for req in requirements:
        if req in supported:
            continue
        if not req or not req[0:1].isalnum():
            # an empty or oddly-led entry means the file itself is damaged,
            # not merely written by a newer Mercurial
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        unknown.append(req)
    if unknown:
        unknown.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(unknown),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
904
904
def writerequires(opener, requirements):
    '''Write the requirements, sorted and one per line, to .hg/requires.'''
    with opener('requires', 'w') as fp:
        fp.write(''.join('%s\n' % r for r in sorted(requirements)))
909
909
class filecachesubentry(object):
    """Stat-cache record for a single file path.

    'cachestat' holds the last recorded stat information (None when the
    file was absent) and '_cacheable' remembers whether the filesystem can
    reliably report changes for this path (None means "not known yet").
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info, but only if caching can work at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the file appears changed (or can't be cached).

        Updates the recorded stat info as a side effect when a change is
        detected.
        """
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; any other OS error
        # is re-raised
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
964
964
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        # one subentry per path; 'stat' records initial stat info eagerly
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
981
981
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths (joined via self.join at access time) whose stat
        # info decides when the cached value must be recomputed
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name,
        # then return self so the instance acts as a data descriptor
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # fast path: a value already in the instance __dict__ is trusted
        # until explicitly invalidated via __delete__
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # cache in __dict__ so later reads hit the fast path above
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # invalidate: drop the __dict__ fast path so the next __get__
        # re-checks the tracked files
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1060
1060
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # record format: "<revspec>[ <value>]" -- a missing value
            # defaults to the empty string
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                # later records for the same rev overwrite earlier ones
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child and close the stream, even if parsing failed
        if proc:
            proc.communicate()
        if src:
            src.close()
    # checked after cleanup: a non-zero exit status aborts even when some
    # records were parsed successfully
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1115
1115
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run 'cmd' through ui.system with a lock-inheritance token exported.

    'lock' must currently be held; its inherit() context manager yields the
    token that is placed into the subprocess environment under 'envvar'.
    Raises LockInheritanceContractViolation when 'lock' is None. Returns
    the subprocess exit code. Note that a caller-supplied 'environ' dict is
    mutated in place.
    """
    if environ is None:
        environ = {}
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    with lock.inherit() as lockname:
        environ[envvar] = lockname
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1125
1125
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1134
1134
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    # the legacy knob implies the same repo format at creation time
    return ui.configbool('format', 'usegeneraldelta')
1141
1141
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    configured = ui.configbool('format', 'generaldelta')
    return configured
1147
1147
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but not used here
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file and return its contents as a dict.

        When 'firstlinenonkeyval' is true, the first line is not parsed as
        a key-value pair but returned whole (without its trailing newline)
        under the __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # 'if line.strip()' skips lines containing only a newline,
            # which a bare 'if line' would not catch
            parsed = dict(line[:-1].split('=', 1) for line in lines
                          if line.strip())
            if self.firstlinekey in parsed:
                raise error.CorruptedState(_("%r can't be used as a key")
                                           % self.firstlinekey)
            d.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError("key name '%s' is reserved"
                                             % self.firstlinekey)
            if not k[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1216
1216
# transaction name prefixes for which a summary of obsoleted changesets is
# issued when the transaction closes (see registersummarycallback)
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction name prefixes for which the range of new changesets is
# reported when the transaction closes (see registersummarycallback)
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1237
1237
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # 'txnname' may carry extra detail after the command name, so the
        # sources are matched as prefixes rather than by equality
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing label, revset name) pairs for each instability kind
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions per type, excluding filtered ones
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken at registration time; the callback compares the
        # post-transaction counts against it to report only new instability
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            # xrange(0, 0) is an empty, falsy placeholder used when the
            # transaction recorded no 'revs' change (py2-era codebase)
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1321
1321
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line string naming 'nodes' by their short hashes.

    All nodes are listed when there are at most 'maxnumnodes' of them or
    when the ui is verbose; otherwise only the first 'maxnumnodes' are
    shown, followed by a count of the remainder.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1327
1327
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) < 2:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1342
1342
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation returns the sink unchanged; extensions replace
    # or wrap this function to intercept sink creation
    return sink
1348
1348
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if (not repo.filtername
        or not repo.ui.configbool('experimental', 'directaccess')):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-like symbol mentioned by the user-supplied revsets
    hashlike = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        hashlike.update(revsetlang.gethashlikesymbols(parsed))

    if not hashlike:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlike)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join(pycompat.bytestr(unfi[r]) for r in hiddenrevs)
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filter name to keep branch/tags caches separate
    # until those caches can be disabled when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
1391
1391
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try to interpret the symbol as a revision number
        try:
            n = int(s)
            if n <= tiprev:
                # NOTE(review): valid revs are 0..len(unficl)-1, so
                # 'n <= tiprev' also admits n == len(unficl); such an n is
                # not in cl and would be added below as a nonexistent rev.
                # Looks like an off-by-one ('n < tiprev'?) -- confirm.
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        # present unfiltered but filtered from the visible
                        # changelog, i.e. hidden
                        revs.add(n)
                    continue
            # n > tiprev: cannot be a revnum; fall through and treat the
            # digit string as a potential hash prefix
        except ValueError:
            pass

        # next, try the symbol as a (partial) node hash
        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now