##// END OF EJS Templates
walkrepos: don't reimplement any()...
Martin von Zweigbergk -
r36356:ddd9474d default
parent child Browse files
Show More
@@ -1,1422 +1,1418
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 if pycompat.iswindows:
44 if pycompat.iswindows:
45 from . import scmwindows as scmplatform
45 from . import scmwindows as scmplatform
46 else:
46 else:
47 from . import scmposix as scmplatform
47 from . import scmposix as scmplatform
48
48
49 termsize = scmplatform.termsize
49 termsize = scmplatform.termsize
50
50
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
103
103
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping preferring ctx1 for subpaths present in
    # both contexts. The subpaths from ctx2 matter when the .hgsub file has
    # been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subpaths that exist only in ctx2 are handled separately below.
    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for s in missing:
        del subpaths[s]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
128
128
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret and still alive; they explain
    # why nothing was exchanged.
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        msg = (_("no changes found (ignored %d secret changesets)\n")
               % len(secretlist))
        ui.status(msg)
    else:
        ui.status(_("no changes found\n"))
145
145
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # record the traceback (shown under --traceback) before letting
            # the outer handlers translate the exception into a message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # NOTE: basestring/unicode below are Python 2 names; this module
        # targets the py2/py3-straddling Mercurial codebase of this era.
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # informational, not an abort: exit status 1
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # the last word of the message is usually the missing module name
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like object
            ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            # URLError-like object
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): ignore
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
256
256
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    # a purely numeric name would be ambiguous with revision numbers
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
272
272
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(c in f for c in ('\r', '\n')):
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
277
277
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
289
289
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    boolval = util.parsebool(val)
    lowered = val.lower()
    # Windows always enforces portability; elsewhere it is opt-in.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = boolval or lowered == 'warn'
    if boolval is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
302
302
class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        self._dirstate = dirstate
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        # _newfiles remembers names already audited so that calling this
        # object twice with the same filename doesn't produce a bogus
        # case-collision complaint.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
326
326
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
350
350
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # errors under subdirectories are ignored; one on the walk root
        # itself is fatal
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(statlist, dirname):
            # record dirname's stat in statlist; return True if it was new
            # (samestat-based so symlink cycles are detected)
            dirstat = os.stat(dirname)
            seen = any(samestat(dirstat, s) for s in statlist)
            if not seen:
                statlist.append(dirstat)
            return not seen
    else:
        # without samestat we can't detect cycles, so don't follow symlinks
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
398
394
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # the working directory has no real node; substitute the magic wdirid
    node = ctx.node()
    return wdirid if node is None else node
405
401
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # the working directory has no real rev; substitute the magic wdirrev
    rev = ctx.rev()
    return wdirrev if rev is None else rev
413
409
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
419
415
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # debug output shows the full hash, normal output the short form
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
427
423
def revsingle(repo, revspec, default='.', localalias=None):
    # an explicit 0 is a valid revision; only fall back on truly-empty specs
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
436
432
def _pairspec(revspec):
    # True if the top-level revset operator is a range expression
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in rangeops
440
436
def revpair(repo, revs):
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    # pick the endpoints of the resolved set, cheaply when its order is known
    if not resolved:
        first = second = None
    elif resolved.isascending():
        first, second = resolved.min(), resolved.max()
    elif resolved.isdescending():
        first, second = resolved.max(), resolved.min()
    else:
        first, second = resolved.first(), resolved.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
470
466
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are wrapped in rev(%d) so the revset parser accepts them
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
498
494
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() < intrev(ctx) - 1:
        return parents
    # sole parent immediately precedes this rev: nothing worth showing
    return []
514
510
515 def expandpats(pats):
511 def expandpats(pats):
516 '''Expand bare globs when running on windows.
512 '''Expand bare globs when running on windows.
517 On posix we assume it already has already been done by sh.'''
513 On posix we assume it already has already been done by sh.'''
518 if not util.expandglobs:
514 if not util.expandglobs:
519 return list(pats)
515 return list(pats)
520 ret = []
516 ret = []
521 for kindpat in pats:
517 for kindpat in pats:
522 kind, pat = matchmod._patsplit(kindpat, None)
518 kind, pat = matchmod._patsplit(kindpat, None)
523 if kind is None:
519 if kind is None:
524 try:
520 try:
525 globbed = glob.glob(pat)
521 globbed = glob.glob(pat)
526 except re.error:
522 except re.error:
527 globbed = [pat]
523 globbed = [pat]
528 if globbed:
524 if globbed:
529 ret.extend(globbed)
525 ret.extend(globbed)
530 continue
526 continue
531 ret.append(kindpat)
527 ret.append(kindpat)
532 return ret
528 return ret
533
529
534 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
530 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
535 badfn=None):
531 badfn=None):
536 '''Return a matcher and the patterns that were used.
532 '''Return a matcher and the patterns that were used.
537 The matcher will warn about bad matches, unless an alternate badfn callback
533 The matcher will warn about bad matches, unless an alternate badfn callback
538 is provided.'''
534 is provided.'''
539 if pats == ("",):
535 if pats == ("",):
540 pats = []
536 pats = []
541 if opts is None:
537 if opts is None:
542 opts = {}
538 opts = {}
543 if not globbed and default == 'relpath':
539 if not globbed and default == 'relpath':
544 pats = expandpats(pats or [])
540 pats = expandpats(pats or [])
545
541
546 def bad(f, msg):
542 def bad(f, msg):
547 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
543 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
548
544
549 if badfn is None:
545 if badfn is None:
550 badfn = bad
546 badfn = bad
551
547
552 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
548 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
553 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
549 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
554
550
555 if m.always():
551 if m.always():
556 pats = []
552 pats = []
557 return m, pats
553 return m, pats
558
554
559 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
555 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
560 badfn=None):
556 badfn=None):
561 '''Return a matcher that will warn about bad matches.'''
557 '''Return a matcher that will warn about bad matches.'''
562 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
558 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
563
559
564 def matchall(repo):
560 def matchall(repo):
565 '''Return a matcher that will efficiently match everything.'''
561 '''Return a matcher that will efficiently match everything.'''
566 return matchmod.always(repo.root, repo.getcwd())
562 return matchmod.always(repo.root, repo.getcwd())
567
563
568 def matchfiles(repo, files, badfn=None):
564 def matchfiles(repo, files, badfn=None):
569 '''Return a matcher that will efficiently match exactly these files.'''
565 '''Return a matcher that will efficiently match exactly these files.'''
570 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
566 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
571
567
572 def parsefollowlinespattern(repo, rev, pat, msg):
568 def parsefollowlinespattern(repo, rev, pat, msg):
573 """Return a file name from `pat` pattern suitable for usage in followlines
569 """Return a file name from `pat` pattern suitable for usage in followlines
574 logic.
570 logic.
575 """
571 """
576 if not matchmod.patkind(pat):
572 if not matchmod.patkind(pat):
577 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
573 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
578 else:
574 else:
579 ctx = repo[rev]
575 ctx = repo[rev]
580 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
576 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
581 files = [f for f in ctx if m(f)]
577 files = [f for f in ctx if m(f)]
582 if len(files) != 1:
578 if len(files) != 1:
583 raise error.ParseError(msg)
579 raise error.ParseError(msg)
584 return files[0]
580 return files[0]
585
581
586 def origpath(ui, repo, filepath):
582 def origpath(ui, repo, filepath):
587 '''customize where .orig files are created
583 '''customize where .orig files are created
588
584
589 Fetch user defined path from config file: [ui] origbackuppath = <path>
585 Fetch user defined path from config file: [ui] origbackuppath = <path>
590 Fall back to default (filepath with .orig suffix) if not specified
586 Fall back to default (filepath with .orig suffix) if not specified
591 '''
587 '''
592 origbackuppath = ui.config('ui', 'origbackuppath')
588 origbackuppath = ui.config('ui', 'origbackuppath')
593 if not origbackuppath:
589 if not origbackuppath:
594 return filepath + ".orig"
590 return filepath + ".orig"
595
591
596 # Convert filepath from an absolute path into a path inside the repo.
592 # Convert filepath from an absolute path into a path inside the repo.
597 filepathfromroot = util.normpath(os.path.relpath(filepath,
593 filepathfromroot = util.normpath(os.path.relpath(filepath,
598 start=repo.root))
594 start=repo.root))
599
595
600 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
596 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
601 origbackupdir = origvfs.dirname(filepathfromroot)
597 origbackupdir = origvfs.dirname(filepathfromroot)
602 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
598 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
603 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
599 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
604
600
605 # Remove any files that conflict with the backup file's path
601 # Remove any files that conflict with the backup file's path
606 for f in reversed(list(util.finddirs(filepathfromroot))):
602 for f in reversed(list(util.finddirs(filepathfromroot))):
607 if origvfs.isfileorlink(f):
603 if origvfs.isfileorlink(f):
608 ui.note(_('removing conflicting file: %s\n')
604 ui.note(_('removing conflicting file: %s\n')
609 % origvfs.join(f))
605 % origvfs.join(f))
610 origvfs.unlink(f)
606 origvfs.unlink(f)
611 break
607 break
612
608
613 origvfs.makedirs(origbackupdir)
609 origvfs.makedirs(origbackupdir)
614
610
615 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
611 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
616 ui.note(_('removing conflicting directory: %s\n')
612 ui.note(_('removing conflicting directory: %s\n')
617 % origvfs.join(filepathfromroot))
613 % origvfs.join(filepathfromroot))
618 origvfs.rmtree(filepathfromroot, forcibly=True)
614 origvfs.rmtree(filepathfromroot, forcibly=True)
619
615
620 return origvfs.join(filepathfromroot)
616 return origvfs.join(filepathfromroot)
621
617
622 class _containsnode(object):
618 class _containsnode(object):
623 """proxy __contains__(node) to container.__contains__ which accepts revs"""
619 """proxy __contains__(node) to container.__contains__ which accepts revs"""
624
620
625 def __init__(self, repo, revcontainer):
621 def __init__(self, repo, revcontainer):
626 self._torev = repo.changelog.rev
622 self._torev = repo.changelog.rev
627 self._revcontains = revcontainer.__contains__
623 self._revcontains = revcontainer.__contains__
628
624
629 def __contains__(self, node):
625 def __contains__(self, node):
630 return self._revcontains(self._torev(node))
626 return self._revcontains(self._torev(node))
631
627
632 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
628 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
633 """do common cleanups when old nodes are replaced by new nodes
629 """do common cleanups when old nodes are replaced by new nodes
634
630
635 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
631 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
636 (we might also want to move working directory parent in the future)
632 (we might also want to move working directory parent in the future)
637
633
638 By default, bookmark moves are calculated automatically from 'replacements',
634 By default, bookmark moves are calculated automatically from 'replacements',
639 but 'moves' can be used to override that. Also, 'moves' may include
635 but 'moves' can be used to override that. Also, 'moves' may include
640 additional bookmark moves that should not have associated obsmarkers.
636 additional bookmark moves that should not have associated obsmarkers.
641
637
642 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
638 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
643 have replacements. operation is a string, like "rebase".
639 have replacements. operation is a string, like "rebase".
644
640
645 metadata is dictionary containing metadata to be stored in obsmarker if
641 metadata is dictionary containing metadata to be stored in obsmarker if
646 obsolescence is enabled.
642 obsolescence is enabled.
647 """
643 """
648 if not replacements and not moves:
644 if not replacements and not moves:
649 return
645 return
650
646
651 # translate mapping's other forms
647 # translate mapping's other forms
652 if not util.safehasattr(replacements, 'items'):
648 if not util.safehasattr(replacements, 'items'):
653 replacements = {n: () for n in replacements}
649 replacements = {n: () for n in replacements}
654
650
655 # Calculate bookmark movements
651 # Calculate bookmark movements
656 if moves is None:
652 if moves is None:
657 moves = {}
653 moves = {}
658 # Unfiltered repo is needed since nodes in replacements might be hidden.
654 # Unfiltered repo is needed since nodes in replacements might be hidden.
659 unfi = repo.unfiltered()
655 unfi = repo.unfiltered()
660 for oldnode, newnodes in replacements.items():
656 for oldnode, newnodes in replacements.items():
661 if oldnode in moves:
657 if oldnode in moves:
662 continue
658 continue
663 if len(newnodes) > 1:
659 if len(newnodes) > 1:
664 # usually a split, take the one with biggest rev number
660 # usually a split, take the one with biggest rev number
665 newnode = next(unfi.set('max(%ln)', newnodes)).node()
661 newnode = next(unfi.set('max(%ln)', newnodes)).node()
666 elif len(newnodes) == 0:
662 elif len(newnodes) == 0:
667 # move bookmark backwards
663 # move bookmark backwards
668 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
664 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
669 list(replacements)))
665 list(replacements)))
670 if roots:
666 if roots:
671 newnode = roots[0].node()
667 newnode = roots[0].node()
672 else:
668 else:
673 newnode = nullid
669 newnode = nullid
674 else:
670 else:
675 newnode = newnodes[0]
671 newnode = newnodes[0]
676 moves[oldnode] = newnode
672 moves[oldnode] = newnode
677
673
678 with repo.transaction('cleanup') as tr:
674 with repo.transaction('cleanup') as tr:
679 # Move bookmarks
675 # Move bookmarks
680 bmarks = repo._bookmarks
676 bmarks = repo._bookmarks
681 bmarkchanges = []
677 bmarkchanges = []
682 allnewnodes = [n for ns in replacements.values() for n in ns]
678 allnewnodes = [n for ns in replacements.values() for n in ns]
683 for oldnode, newnode in moves.items():
679 for oldnode, newnode in moves.items():
684 oldbmarks = repo.nodebookmarks(oldnode)
680 oldbmarks = repo.nodebookmarks(oldnode)
685 if not oldbmarks:
681 if not oldbmarks:
686 continue
682 continue
687 from . import bookmarks # avoid import cycle
683 from . import bookmarks # avoid import cycle
688 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
684 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
689 (oldbmarks, hex(oldnode), hex(newnode)))
685 (oldbmarks, hex(oldnode), hex(newnode)))
690 # Delete divergent bookmarks being parents of related newnodes
686 # Delete divergent bookmarks being parents of related newnodes
691 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
687 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
692 allnewnodes, newnode, oldnode)
688 allnewnodes, newnode, oldnode)
693 deletenodes = _containsnode(repo, deleterevs)
689 deletenodes = _containsnode(repo, deleterevs)
694 for name in oldbmarks:
690 for name in oldbmarks:
695 bmarkchanges.append((name, newnode))
691 bmarkchanges.append((name, newnode))
696 for b in bookmarks.divergent2delete(repo, deletenodes, name):
692 for b in bookmarks.divergent2delete(repo, deletenodes, name):
697 bmarkchanges.append((b, None))
693 bmarkchanges.append((b, None))
698
694
699 if bmarkchanges:
695 if bmarkchanges:
700 bmarks.applychanges(repo, tr, bmarkchanges)
696 bmarks.applychanges(repo, tr, bmarkchanges)
701
697
702 # Obsolete or strip nodes
698 # Obsolete or strip nodes
703 if obsolete.isenabled(repo, obsolete.createmarkersopt):
699 if obsolete.isenabled(repo, obsolete.createmarkersopt):
704 # If a node is already obsoleted, and we want to obsolete it
700 # If a node is already obsoleted, and we want to obsolete it
705 # without a successor, skip that obssolete request since it's
701 # without a successor, skip that obssolete request since it's
706 # unnecessary. That's the "if s or not isobs(n)" check below.
702 # unnecessary. That's the "if s or not isobs(n)" check below.
707 # Also sort the node in topology order, that might be useful for
703 # Also sort the node in topology order, that might be useful for
708 # some obsstore logic.
704 # some obsstore logic.
709 # NOTE: the filtering and sorting might belong to createmarkers.
705 # NOTE: the filtering and sorting might belong to createmarkers.
710 isobs = unfi.obsstore.successors.__contains__
706 isobs = unfi.obsstore.successors.__contains__
711 torev = unfi.changelog.rev
707 torev = unfi.changelog.rev
712 sortfunc = lambda ns: torev(ns[0])
708 sortfunc = lambda ns: torev(ns[0])
713 rels = [(unfi[n], tuple(unfi[m] for m in s))
709 rels = [(unfi[n], tuple(unfi[m] for m in s))
714 for n, s in sorted(replacements.items(), key=sortfunc)
710 for n, s in sorted(replacements.items(), key=sortfunc)
715 if s or not isobs(n)]
711 if s or not isobs(n)]
716 if rels:
712 if rels:
717 obsolete.createmarkers(repo, rels, operation=operation,
713 obsolete.createmarkers(repo, rels, operation=operation,
718 metadata=metadata)
714 metadata=metadata)
719 else:
715 else:
720 from . import repair # avoid import cycle
716 from . import repair # avoid import cycle
721 tostrip = list(replacements)
717 tostrip = list(replacements)
722 if tostrip:
718 if tostrip:
723 repair.delayedstrip(repo.ui, repo, tostrip, operation)
719 repair.delayedstrip(repo.ui, repo, tostrip, operation)
724
720
725 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
721 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
726 if opts is None:
722 if opts is None:
727 opts = {}
723 opts = {}
728 m = matcher
724 m = matcher
729 if dry_run is None:
725 if dry_run is None:
730 dry_run = opts.get('dry_run')
726 dry_run = opts.get('dry_run')
731 if similarity is None:
727 if similarity is None:
732 similarity = float(opts.get('similarity') or 0)
728 similarity = float(opts.get('similarity') or 0)
733
729
734 ret = 0
730 ret = 0
735 join = lambda f: os.path.join(prefix, f)
731 join = lambda f: os.path.join(prefix, f)
736
732
737 wctx = repo[None]
733 wctx = repo[None]
738 for subpath in sorted(wctx.substate):
734 for subpath in sorted(wctx.substate):
739 submatch = matchmod.subdirmatcher(subpath, m)
735 submatch = matchmod.subdirmatcher(subpath, m)
740 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
736 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
741 sub = wctx.sub(subpath)
737 sub = wctx.sub(subpath)
742 try:
738 try:
743 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
739 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
744 ret = 1
740 ret = 1
745 except error.LookupError:
741 except error.LookupError:
746 repo.ui.status(_("skipping missing subrepository: %s\n")
742 repo.ui.status(_("skipping missing subrepository: %s\n")
747 % join(subpath))
743 % join(subpath))
748
744
749 rejected = []
745 rejected = []
750 def badfn(f, msg):
746 def badfn(f, msg):
751 if f in m.files():
747 if f in m.files():
752 m.bad(f, msg)
748 m.bad(f, msg)
753 rejected.append(f)
749 rejected.append(f)
754
750
755 badmatch = matchmod.badmatch(m, badfn)
751 badmatch = matchmod.badmatch(m, badfn)
756 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
752 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
757 badmatch)
753 badmatch)
758
754
759 unknownset = set(unknown + forgotten)
755 unknownset = set(unknown + forgotten)
760 toprint = unknownset.copy()
756 toprint = unknownset.copy()
761 toprint.update(deleted)
757 toprint.update(deleted)
762 for abs in sorted(toprint):
758 for abs in sorted(toprint):
763 if repo.ui.verbose or not m.exact(abs):
759 if repo.ui.verbose or not m.exact(abs):
764 if abs in unknownset:
760 if abs in unknownset:
765 status = _('adding %s\n') % m.uipath(abs)
761 status = _('adding %s\n') % m.uipath(abs)
766 else:
762 else:
767 status = _('removing %s\n') % m.uipath(abs)
763 status = _('removing %s\n') % m.uipath(abs)
768 repo.ui.status(status)
764 repo.ui.status(status)
769
765
770 renames = _findrenames(repo, m, added + unknown, removed + deleted,
766 renames = _findrenames(repo, m, added + unknown, removed + deleted,
771 similarity)
767 similarity)
772
768
773 if not dry_run:
769 if not dry_run:
774 _markchanges(repo, unknown + forgotten, deleted, renames)
770 _markchanges(repo, unknown + forgotten, deleted, renames)
775
771
776 for f in rejected:
772 for f in rejected:
777 if f in m.files():
773 if f in m.files():
778 return 1
774 return 1
779 return ret
775 return ret
780
776
781 def marktouched(repo, files, similarity=0.0):
777 def marktouched(repo, files, similarity=0.0):
782 '''Assert that files have somehow been operated upon. files are relative to
778 '''Assert that files have somehow been operated upon. files are relative to
783 the repo root.'''
779 the repo root.'''
784 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
780 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
785 rejected = []
781 rejected = []
786
782
787 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
783 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
788
784
789 if repo.ui.verbose:
785 if repo.ui.verbose:
790 unknownset = set(unknown + forgotten)
786 unknownset = set(unknown + forgotten)
791 toprint = unknownset.copy()
787 toprint = unknownset.copy()
792 toprint.update(deleted)
788 toprint.update(deleted)
793 for abs in sorted(toprint):
789 for abs in sorted(toprint):
794 if abs in unknownset:
790 if abs in unknownset:
795 status = _('adding %s\n') % abs
791 status = _('adding %s\n') % abs
796 else:
792 else:
797 status = _('removing %s\n') % abs
793 status = _('removing %s\n') % abs
798 repo.ui.status(status)
794 repo.ui.status(status)
799
795
800 renames = _findrenames(repo, m, added + unknown, removed + deleted,
796 renames = _findrenames(repo, m, added + unknown, removed + deleted,
801 similarity)
797 similarity)
802
798
803 _markchanges(repo, unknown + forgotten, deleted, renames)
799 _markchanges(repo, unknown + forgotten, deleted, renames)
804
800
805 for f in rejected:
801 for f in rejected:
806 if f in m.files():
802 if f in m.files():
807 return 1
803 return 1
808 return 0
804 return 0
809
805
810 def _interestingfiles(repo, matcher):
806 def _interestingfiles(repo, matcher):
811 '''Walk dirstate with matcher, looking for files that addremove would care
807 '''Walk dirstate with matcher, looking for files that addremove would care
812 about.
808 about.
813
809
814 This is different from dirstate.status because it doesn't care about
810 This is different from dirstate.status because it doesn't care about
815 whether files are modified or clean.'''
811 whether files are modified or clean.'''
816 added, unknown, deleted, removed, forgotten = [], [], [], [], []
812 added, unknown, deleted, removed, forgotten = [], [], [], [], []
817 audit_path = pathutil.pathauditor(repo.root, cached=True)
813 audit_path = pathutil.pathauditor(repo.root, cached=True)
818
814
819 ctx = repo[None]
815 ctx = repo[None]
820 dirstate = repo.dirstate
816 dirstate = repo.dirstate
821 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
817 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
822 unknown=True, ignored=False, full=False)
818 unknown=True, ignored=False, full=False)
823 for abs, st in walkresults.iteritems():
819 for abs, st in walkresults.iteritems():
824 dstate = dirstate[abs]
820 dstate = dirstate[abs]
825 if dstate == '?' and audit_path.check(abs):
821 if dstate == '?' and audit_path.check(abs):
826 unknown.append(abs)
822 unknown.append(abs)
827 elif dstate != 'r' and not st:
823 elif dstate != 'r' and not st:
828 deleted.append(abs)
824 deleted.append(abs)
829 elif dstate == 'r' and st:
825 elif dstate == 'r' and st:
830 forgotten.append(abs)
826 forgotten.append(abs)
831 # for finding renames
827 # for finding renames
832 elif dstate == 'r' and not st:
828 elif dstate == 'r' and not st:
833 removed.append(abs)
829 removed.append(abs)
834 elif dstate == 'a':
830 elif dstate == 'a':
835 added.append(abs)
831 added.append(abs)
836
832
837 return added, unknown, deleted, removed, forgotten
833 return added, unknown, deleted, removed, forgotten
838
834
839 def _findrenames(repo, matcher, added, removed, similarity):
835 def _findrenames(repo, matcher, added, removed, similarity):
840 '''Find renames from removed files to added ones.'''
836 '''Find renames from removed files to added ones.'''
841 renames = {}
837 renames = {}
842 if similarity > 0:
838 if similarity > 0:
843 for old, new, score in similar.findrenames(repo, added, removed,
839 for old, new, score in similar.findrenames(repo, added, removed,
844 similarity):
840 similarity):
845 if (repo.ui.verbose or not matcher.exact(old)
841 if (repo.ui.verbose or not matcher.exact(old)
846 or not matcher.exact(new)):
842 or not matcher.exact(new)):
847 repo.ui.status(_('recording removal of %s as rename to %s '
843 repo.ui.status(_('recording removal of %s as rename to %s '
848 '(%d%% similar)\n') %
844 '(%d%% similar)\n') %
849 (matcher.rel(old), matcher.rel(new),
845 (matcher.rel(old), matcher.rel(new),
850 score * 100))
846 score * 100))
851 renames[new] = old
847 renames[new] = old
852 return renames
848 return renames
853
849
854 def _markchanges(repo, unknown, deleted, renames):
850 def _markchanges(repo, unknown, deleted, renames):
855 '''Marks the files in unknown as added, the files in deleted as removed,
851 '''Marks the files in unknown as added, the files in deleted as removed,
856 and the files in renames as copied.'''
852 and the files in renames as copied.'''
857 wctx = repo[None]
853 wctx = repo[None]
858 with repo.wlock():
854 with repo.wlock():
859 wctx.forget(deleted)
855 wctx.forget(deleted)
860 wctx.add(unknown)
856 wctx.add(unknown)
861 for new, old in renames.iteritems():
857 for new, old in renames.iteritems():
862 wctx.copy(old, new)
858 wctx.copy(old, new)
863
859
864 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
860 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
865 """Update the dirstate to reflect the intent of copying src to dst. For
861 """Update the dirstate to reflect the intent of copying src to dst. For
866 different reasons it might not end with dst being marked as copied from src.
862 different reasons it might not end with dst being marked as copied from src.
867 """
863 """
868 origsrc = repo.dirstate.copied(src) or src
864 origsrc = repo.dirstate.copied(src) or src
869 if dst == origsrc: # copying back a copy?
865 if dst == origsrc: # copying back a copy?
870 if repo.dirstate[dst] not in 'mn' and not dryrun:
866 if repo.dirstate[dst] not in 'mn' and not dryrun:
871 repo.dirstate.normallookup(dst)
867 repo.dirstate.normallookup(dst)
872 else:
868 else:
873 if repo.dirstate[origsrc] == 'a' and origsrc == src:
869 if repo.dirstate[origsrc] == 'a' and origsrc == src:
874 if not ui.quiet:
870 if not ui.quiet:
875 ui.warn(_("%s has not been committed yet, so no copy "
871 ui.warn(_("%s has not been committed yet, so no copy "
876 "data will be stored for %s.\n")
872 "data will be stored for %s.\n")
877 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
873 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
878 if repo.dirstate[dst] in '?r' and not dryrun:
874 if repo.dirstate[dst] in '?r' and not dryrun:
879 wctx.add([dst])
875 wctx.add([dst])
880 elif not dryrun:
876 elif not dryrun:
881 wctx.copy(origsrc, dst)
877 wctx.copy(origsrc, dst)
882
878
883 def readrequires(opener, supported):
879 def readrequires(opener, supported):
884 '''Reads and parses .hg/requires and checks if all entries found
880 '''Reads and parses .hg/requires and checks if all entries found
885 are in the list of supported features.'''
881 are in the list of supported features.'''
886 requirements = set(opener.read("requires").splitlines())
882 requirements = set(opener.read("requires").splitlines())
887 missings = []
883 missings = []
888 for r in requirements:
884 for r in requirements:
889 if r not in supported:
885 if r not in supported:
890 if not r or not r[0:1].isalnum():
886 if not r or not r[0:1].isalnum():
891 raise error.RequirementError(_(".hg/requires file is corrupt"))
887 raise error.RequirementError(_(".hg/requires file is corrupt"))
892 missings.append(r)
888 missings.append(r)
893 missings.sort()
889 missings.sort()
894 if missings:
890 if missings:
895 raise error.RequirementError(
891 raise error.RequirementError(
896 _("repository requires features unknown to this Mercurial: %s")
892 _("repository requires features unknown to this Mercurial: %s")
897 % " ".join(missings),
893 % " ".join(missings),
898 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
894 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
899 " for more information"))
895 " for more information"))
900 return requirements
896 return requirements
901
897
902 def writerequires(opener, requirements):
898 def writerequires(opener, requirements):
903 with opener('requires', 'w') as fp:
899 with opener('requires', 'w') as fp:
904 for r in sorted(requirements):
900 for r in sorted(requirements):
905 fp.write("%s\n" % r)
901 fp.write("%s\n" % r)
906
902
907 class filecachesubentry(object):
903 class filecachesubentry(object):
908 def __init__(self, path, stat):
904 def __init__(self, path, stat):
909 self.path = path
905 self.path = path
910 self.cachestat = None
906 self.cachestat = None
911 self._cacheable = None
907 self._cacheable = None
912
908
913 if stat:
909 if stat:
914 self.cachestat = filecachesubentry.stat(self.path)
910 self.cachestat = filecachesubentry.stat(self.path)
915
911
916 if self.cachestat:
912 if self.cachestat:
917 self._cacheable = self.cachestat.cacheable()
913 self._cacheable = self.cachestat.cacheable()
918 else:
914 else:
919 # None means we don't know yet
915 # None means we don't know yet
920 self._cacheable = None
916 self._cacheable = None
921
917
922 def refresh(self):
918 def refresh(self):
923 if self.cacheable():
919 if self.cacheable():
924 self.cachestat = filecachesubentry.stat(self.path)
920 self.cachestat = filecachesubentry.stat(self.path)
925
921
926 def cacheable(self):
922 def cacheable(self):
927 if self._cacheable is not None:
923 if self._cacheable is not None:
928 return self._cacheable
924 return self._cacheable
929
925
930 # we don't know yet, assume it is for now
926 # we don't know yet, assume it is for now
931 return True
927 return True
932
928
933 def changed(self):
929 def changed(self):
934 # no point in going further if we can't cache it
930 # no point in going further if we can't cache it
935 if not self.cacheable():
931 if not self.cacheable():
936 return True
932 return True
937
933
938 newstat = filecachesubentry.stat(self.path)
934 newstat = filecachesubentry.stat(self.path)
939
935
940 # we may not know if it's cacheable yet, check again now
936 # we may not know if it's cacheable yet, check again now
941 if newstat and self._cacheable is None:
937 if newstat and self._cacheable is None:
942 self._cacheable = newstat.cacheable()
938 self._cacheable = newstat.cacheable()
943
939
944 # check again
940 # check again
945 if not self._cacheable:
941 if not self._cacheable:
946 return True
942 return True
947
943
948 if self.cachestat != newstat:
944 if self.cachestat != newstat:
949 self.cachestat = newstat
945 self.cachestat = newstat
950 return True
946 return True
951 else:
947 else:
952 return False
948 return False
953
949
954 @staticmethod
950 @staticmethod
955 def stat(path):
951 def stat(path):
956 try:
952 try:
957 return util.cachestat(path)
953 return util.cachestat(path)
958 except OSError as e:
954 except OSError as e:
959 if e.errno != errno.ENOENT:
955 if e.errno != errno.ENOENT:
960 raise
956 raise
961
957
962 class filecacheentry(object):
958 class filecacheentry(object):
963 def __init__(self, paths, stat=True):
959 def __init__(self, paths, stat=True):
964 self._entries = []
960 self._entries = []
965 for path in paths:
961 for path in paths:
966 self._entries.append(filecachesubentry(path, stat))
962 self._entries.append(filecachesubentry(path, stat))
967
963
968 def changed(self):
964 def changed(self):
969 '''true if any entry has changed'''
965 '''true if any entry has changed'''
970 for entry in self._entries:
966 for entry in self._entries:
971 if entry.changed():
967 if entry.changed():
972 return True
968 return True
973 return False
969 return False
974
970
975 def refresh(self):
971 def refresh(self):
976 for entry in self._entries:
972 for entry in self._entries:
977 entry.refresh()
973 entry.refresh()
978
974
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths whose stat info controls invalidation of the cache
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # used as a decorator: remember the wrapped function and the
        # attribute name the computed value will be cached under
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # obj.__dict__[self.name] is the per-instance fast path; it is kept
        # in sync with obj._filecache (see __set__ and the assert below).
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1057
1053
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the subprocess and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    # only check the exit status after the process has been reaped above
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1112
1108
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run *cmd* through repo.ui.system while making *lock* inheritable.

    The inheritance token is exposed to the child process through the
    *envvar* environment variable. Raises LockInheritanceContractViolation
    when *lock* is None, i.e. when the lock is not actually held.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    environ = {} if environ is None else environ
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1122
1118
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1131
1127
def gdinitconfig(ui):
    """Tell whether a new repository should be created with general delta."""
    # experimental config: format.generaldelta
    # either knob enables general delta for newly created repositories
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1138
1134
def gddeltaconfig(ui):
    """Tell whether an incoming delta should be optimised.

    Unlike gdinitconfig(), only the primary knob is consulted.
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1144
1140
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # vfs the file lives in, and its path relative to that vfs
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # the stored first line loses its trailing '\n'
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # the 'if line.strip()' filter prevents us from failing on lines
            # that contain only '\n' and therefore are not skipped by
            # 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                raise error.CorruptedState(
                    _("%r can't be used as a key") % self.firstlinekey)
            result.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = [] if firstline is None else ['%s\n' % firstline]

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1213
1209
# transaction-name prefixes (matched by registersummarycallback's txmatch)
# for which a summary of obsoleted changesets is reported
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction-name prefixes for which the range of new changesets is reported
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1234
1230
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # only report for transactions whose name starts with a known prefix
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (instability name as reported, revset name used for the query)
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot counts now so the callback reports only the delta
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            # empty range as default when the transaction recorded no revs
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1318
1314
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Summarize *nodes* as a space-separated list of short hashes.

    All nodes are shown when there are at most *maxnumnodes* of them or when
    the ui is verbose; otherwise only the first *maxnumnodes* appear,
    followed by a count of the remainder.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1324
1320
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads

    Aborts (with the offending branch name and a hint listing its heads)
    as soon as one branch with more than one visible head is found.
    """
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
1339
1335
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # identity by default; extensions monkeypatch this to decorate the sink
    return sink
1345
1341
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access must be explicitly enabled, and only makes sense on a
    # filtered repo
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1388
1384
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # NOTE(review): tiprev is len(unficl), so 'n <= tiprev' also lets
            # the first out-of-range revnum through; it can end up in 'revs'
            # via the 'n not in cl' branch below -- confirm whether
            # 'n < tiprev' was intended.
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now