scmutil: fix a repr in an error message on Python 3
Augie Fackler
r36587:bb5f5c1c default
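Why the change: Mercurial's translated message strings are bytes, and PEP 461
bytes %-formatting treats %r like %a, so on Python 3 the repr of a bytes value
carries a b'' prefix that leaks into the user-visible error message. Wrapping
the offending character in pycompat.bytestr restores the Python 2 style output.
A minimal sketch of the discrepancy (the bytestr class below is a simplified
stand-in for Mercurial's helper, not its real implementation):

    c = b':'

    # Plain bytes: the b'' prefix of the Python 3 repr leaks into the message.
    msg = b"%r cannot be used in a name" % c
    assert msg == b"b':' cannot be used in a name"

    # A bytes subclass whose repr drops the prefix restores Python 2's output.
    class bytestr(bytes):
        def __repr__(self):
            return repr(bytes(self))[1:]  # drop the leading 'b'

    msg = b"%r cannot be used in a name" % bytestr(c)
    assert msg == b"':' cannot be used in a name"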
@@ -1,1418 +1,1419
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    pycompat,
    revsetlang,
    similar,
    url,
    util,
    vfs,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = util.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1

def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
-            raise error.Abort(_("%r cannot be used in a name") % c)
+            raise error.Abort(
+                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, util.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)

def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
807 '''Walk dirstate with matcher, looking for files that addremove would care
808 '''Walk dirstate with matcher, looking for files that addremove would care
808 about.
809 about.
809
810
810 This is different from dirstate.status because it doesn't care about
811 This is different from dirstate.status because it doesn't care about
811 whether files are modified or clean.'''
812 whether files are modified or clean.'''
812 added, unknown, deleted, removed, forgotten = [], [], [], [], []
813 added, unknown, deleted, removed, forgotten = [], [], [], [], []
813 audit_path = pathutil.pathauditor(repo.root, cached=True)
814 audit_path = pathutil.pathauditor(repo.root, cached=True)
814
815
815 ctx = repo[None]
816 ctx = repo[None]
816 dirstate = repo.dirstate
817 dirstate = repo.dirstate
817 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
818 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
818 unknown=True, ignored=False, full=False)
819 unknown=True, ignored=False, full=False)
819 for abs, st in walkresults.iteritems():
820 for abs, st in walkresults.iteritems():
820 dstate = dirstate[abs]
821 dstate = dirstate[abs]
821 if dstate == '?' and audit_path.check(abs):
822 if dstate == '?' and audit_path.check(abs):
822 unknown.append(abs)
823 unknown.append(abs)
823 elif dstate != 'r' and not st:
824 elif dstate != 'r' and not st:
824 deleted.append(abs)
825 deleted.append(abs)
825 elif dstate == 'r' and st:
826 elif dstate == 'r' and st:
826 forgotten.append(abs)
827 forgotten.append(abs)
827 # for finding renames
828 # for finding renames
828 elif dstate == 'r' and not st:
829 elif dstate == 'r' and not st:
829 removed.append(abs)
830 removed.append(abs)
830 elif dstate == 'a':
831 elif dstate == 'a':
831 added.append(abs)
832 added.append(abs)
832
833
833 return added, unknown, deleted, removed, forgotten
834 return added, unknown, deleted, removed, forgotten
834
835
835 def _findrenames(repo, matcher, added, removed, similarity):
836 def _findrenames(repo, matcher, added, removed, similarity):
836 '''Find renames from removed files to added ones.'''
837 '''Find renames from removed files to added ones.'''
837 renames = {}
838 renames = {}
838 if similarity > 0:
839 if similarity > 0:
839 for old, new, score in similar.findrenames(repo, added, removed,
840 for old, new, score in similar.findrenames(repo, added, removed,
840 similarity):
841 similarity):
841 if (repo.ui.verbose or not matcher.exact(old)
842 if (repo.ui.verbose or not matcher.exact(old)
842 or not matcher.exact(new)):
843 or not matcher.exact(new)):
843 repo.ui.status(_('recording removal of %s as rename to %s '
844 repo.ui.status(_('recording removal of %s as rename to %s '
844 '(%d%% similar)\n') %
845 '(%d%% similar)\n') %
845 (matcher.rel(old), matcher.rel(new),
846 (matcher.rel(old), matcher.rel(new),
846 score * 100))
847 score * 100))
847 renames[new] = old
848 renames[new] = old
848 return renames
849 return renames
849
850
850 def _markchanges(repo, unknown, deleted, renames):
851 def _markchanges(repo, unknown, deleted, renames):
851 '''Marks the files in unknown as added, the files in deleted as removed,
852 '''Marks the files in unknown as added, the files in deleted as removed,
852 and the files in renames as copied.'''
853 and the files in renames as copied.'''
853 wctx = repo[None]
854 wctx = repo[None]
854 with repo.wlock():
855 with repo.wlock():
855 wctx.forget(deleted)
856 wctx.forget(deleted)
856 wctx.add(unknown)
857 wctx.add(unknown)
857 for new, old in renames.iteritems():
858 for new, old in renames.iteritems():
858 wctx.copy(old, new)
859 wctx.copy(old, new)
859
860
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons, it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            if not r or not r[0:1].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements

def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

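# Illustrative example: .hg/requires is plain text with one feature name per
# line. A typical file (entries vary with the repository format) might read:
#
#   dotencode
#   fncache
#   generaldelta
#   revlogv1
#   store
#
# readrequires() raises error.RequirementError for any entry missing from
# 'supported'; writerequires() emits the set back in sorted order.
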
class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    '''A property-like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomically renames or appends to files under .hg, so to
    ensure the cache is reliable we need the filesystem to be able to tell us
    if a file has been replaced. If it can't, we fall back to recreating the
    object on every call (essentially the same behavior as propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified the file between the time we read and stat it
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)

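# An illustrative subclass (a sketch patterned after localrepo's usage; the
# 'fakerepo' class, its vfs attribute, and parsebookmarks() are assumptions
# for the example, not part of this module):
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class fakerepo(object):
#       def __init__(self, vfs):
#           self._filecache = {}  # required by the descriptor protocol above
#           self.vfs = vfs
#
#       @repofilecache('bookmarks')
#       def bookmarks(self):
#           return parsebookmarks(self)  # hypothetical loader
#
# Accessing fakerepo(vfs).bookmarks then re-reads the file only when the
# recorded stat info indicates .hg/bookmarks has changed.
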
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data

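# Illustrative configuration (the source name 'notes' and its data file are
# assumptions for the example). In an hgrc:
#
#   [extdata]
#   notes = shell:cat .hg/notes.txt
#
# where each output line looks like '<node-or-rev> <freeform text>'. The
# resulting map can then be consumed from revsets, e.g.
# 'hg log -r "extdata(notes)"'.
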
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

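# Illustrative call (the command string is an assumption; any shell command
# that itself needs the working-copy lock would do):
#
#   with repo.wlock():
#       rc = wlocksub(repo, 'hg update tip')
#
# The child process finds the lock token in HG_WLOCK_LOCKER and can inherit
# the wlock instead of deadlocking against its parent.
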
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumeric and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines, which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumeric and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as-is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

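# Illustrative round trip (the file name 'teststate' is an assumption for the
# sketch; shelve's state files use this class in the same way):
#
#   f = simplekeyvaluefile(repo.vfs, 'teststate')
#   f.write({'version': '1', 'user': 'alice'}, firstline='header')
#   d = f.read(firstlinenonkeyval=True)
#   # d == {'__firstline': 'header', 'version': '1', 'user': 'alice'}
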
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally before
# the command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

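# Illustrative wiring (a sketch of what a transaction-opening caller such as
# localrepo.transaction() does; the transaction name and output are
# assumptions for the example):
#
#   tr = repo.transaction('pull')
#   registersummarycallback(repo, tr, txnname='pull')
#   # ... apply changes ...
#   tr.close()  # post-close callbacks fire and may print, e.g.,
#               # "new changesets 1a2b3c4d:5e6f7a8b"
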
def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filter name to separate the branch/tags caches
    # until we can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

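# Illustrative configuration enabling this behavior (both options are read by
# the code above and below, and both are experimental):
#
#   [experimental]
#   directaccess = True
#   directaccess.revnums = True
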
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of the
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                        continue
        except ValueError:
            pass

        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs