scmutil: fix requires-file isalnum() check on first byte...
Augie Fackler
r36331:3f98634b default
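
The one-line fix below is a Python 3 porting correction: indexing a bytes object yields an int, which has no isalnum() method, while slicing yields a bytes object of length at most one, which does. A minimal sketch of the difference (illustrative only, not part of the changeset):

    r = b"dotencode"
    # Python 2: r[0] is the one-byte string 'd', so r[0].isalnum() works.
    # Python 3: r[0] is the int 100; ints have no .isalnum(), so the old
    # check raised AttributeError instead of flagging a corrupt file.
    # Slicing behaves identically on both versions:
    print(r[0:1].isalnum())    # True on Python 2 and Python 3
    print(b""[0:1].isalnum())  # False, with no IndexError on empty input

The slice also degrades gracefully on an empty entry, although the "not r" guard on the changed line already screens that case out.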
@@ -1,1422 +1,1422 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 if pycompat.iswindows:
44 if pycompat.iswindows:
45 from . import scmwindows as scmplatform
45 from . import scmwindows as scmplatform
46 else:
46 else:
47 from . import scmposix as scmplatform
47 from . import scmposix as scmplatform
48
48
49 termsize = scmplatform.termsize
49 termsize = scmplatform.termsize
50
50
51 class status(tuple):
51 class status(tuple):
52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
53 and 'ignored' properties are only relevant to the working copy.
53 and 'ignored' properties are only relevant to the working copy.
54 '''
54 '''
55
55
56 __slots__ = ()
56 __slots__ = ()
57
57
58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
59 clean):
59 clean):
60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
61 ignored, clean))
61 ignored, clean))
62
62
63 @property
63 @property
64 def modified(self):
64 def modified(self):
65 '''files that have been modified'''
65 '''files that have been modified'''
66 return self[0]
66 return self[0]
67
67
68 @property
68 @property
69 def added(self):
69 def added(self):
70 '''files that have been added'''
70 '''files that have been added'''
71 return self[1]
71 return self[1]
72
72
73 @property
73 @property
74 def removed(self):
74 def removed(self):
75 '''files that have been removed'''
75 '''files that have been removed'''
76 return self[2]
76 return self[2]
77
77
78 @property
78 @property
79 def deleted(self):
79 def deleted(self):
80 '''files that are in the dirstate, but have been deleted from the
80 '''files that are in the dirstate, but have been deleted from the
81 working copy (aka "missing")
81 working copy (aka "missing")
82 '''
82 '''
83 return self[3]
83 return self[3]
84
84
85 @property
85 @property
86 def unknown(self):
86 def unknown(self):
87 '''files not in the dirstate that are not ignored'''
87 '''files not in the dirstate that are not ignored'''
88 return self[4]
88 return self[4]
89
89
90 @property
90 @property
91 def ignored(self):
91 def ignored(self):
92 '''files not in the dirstate that are ignored (by _dirignore())'''
92 '''files not in the dirstate that are ignored (by _dirignore())'''
93 return self[5]
93 return self[5]
94
94
95 @property
95 @property
96 def clean(self):
96 def clean(self):
97 '''files that have not been modified'''
97 '''files that have not been modified'''
98 return self[6]
98 return self[6]
99
99
100 def __repr__(self, *args, **kwargs):
100 def __repr__(self, *args, **kwargs):
101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
102 'unknown=%r, ignored=%r, clean=%r>') % self)
102 'unknown=%r, ignored=%r, clean=%r>') % self)
103
103
104 def itersubrepos(ctx1, ctx2):
104 def itersubrepos(ctx1, ctx2):
105 """find subrepos in ctx1 or ctx2"""
105 """find subrepos in ctx1 or ctx2"""
106 # Create a (subpath, ctx) mapping where we prefer subpaths from
106 # Create a (subpath, ctx) mapping where we prefer subpaths from
107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
108 # has been modified (in ctx2) but not yet committed (in ctx1).
108 # has been modified (in ctx2) but not yet committed (in ctx1).
109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
111
111
112 missing = set()
112 missing = set()
113
113
114 for subpath in ctx2.substate:
114 for subpath in ctx2.substate:
115 if subpath not in ctx1.substate:
115 if subpath not in ctx1.substate:
116 del subpaths[subpath]
116 del subpaths[subpath]
117 missing.add(subpath)
117 missing.add(subpath)
118
118
119 for subpath, ctx in sorted(subpaths.iteritems()):
119 for subpath, ctx in sorted(subpaths.iteritems()):
120 yield subpath, ctx.sub(subpath)
120 yield subpath, ctx.sub(subpath)
121
121
122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
123 # status and diff will have an accurate result when it does
123 # status and diff will have an accurate result when it does
124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
125 # against itself.
125 # against itself.
126 for subpath in missing:
126 for subpath in missing:
127 yield subpath, ctx2.nullsub(subpath, ctx1)
127 yield subpath, ctx2.nullsub(subpath, ctx1)
128
128
129 def nochangesfound(ui, repo, excluded=None):
129 def nochangesfound(ui, repo, excluded=None):
130 '''Report no changes for push/pull, excluded is None or a list of
130 '''Report no changes for push/pull, excluded is None or a list of
131 nodes excluded from the push/pull.
131 nodes excluded from the push/pull.
132 '''
132 '''
133 secretlist = []
133 secretlist = []
134 if excluded:
134 if excluded:
135 for n in excluded:
135 for n in excluded:
136 ctx = repo[n]
136 ctx = repo[n]
137 if ctx.phase() >= phases.secret and not ctx.extinct():
137 if ctx.phase() >= phases.secret and not ctx.extinct():
138 secretlist.append(n)
138 secretlist.append(n)
139
139
140 if secretlist:
140 if secretlist:
141 ui.status(_("no changes found (ignored %d secret changesets)\n")
141 ui.status(_("no changes found (ignored %d secret changesets)\n")
142 % len(secretlist))
142 % len(secretlist))
143 else:
143 else:
144 ui.status(_("no changes found\n"))
144 ui.status(_("no changes found\n"))
145
145
146 def callcatch(ui, func):
146 def callcatch(ui, func):
147 """call func() with global exception handling
147 """call func() with global exception handling
148
148
149 return func() if no exception happens. otherwise do some error handling
149 return func() if no exception happens. otherwise do some error handling
150 and return an exit code accordingly. does not handle all exceptions.
150 and return an exit code accordingly. does not handle all exceptions.
151 """
151 """
152 try:
152 try:
153 try:
153 try:
154 return func()
154 return func()
155 except: # re-raises
155 except: # re-raises
156 ui.traceback()
156 ui.traceback()
157 raise
157 raise
158 # Global exception handling, alphabetically
158 # Global exception handling, alphabetically
159 # Mercurial-specific first, followed by built-in and library exceptions
159 # Mercurial-specific first, followed by built-in and library exceptions
160 except error.LockHeld as inst:
160 except error.LockHeld as inst:
161 if inst.errno == errno.ETIMEDOUT:
161 if inst.errno == errno.ETIMEDOUT:
162 reason = _('timed out waiting for lock held by %r') % inst.locker
162 reason = _('timed out waiting for lock held by %r') % inst.locker
163 else:
163 else:
164 reason = _('lock held by %r') % inst.locker
164 reason = _('lock held by %r') % inst.locker
165 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
165 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
166 if not inst.locker:
166 if not inst.locker:
167 ui.warn(_("(lock might be very busy)\n"))
167 ui.warn(_("(lock might be very busy)\n"))
168 except error.LockUnavailable as inst:
168 except error.LockUnavailable as inst:
169 ui.warn(_("abort: could not lock %s: %s\n") %
169 ui.warn(_("abort: could not lock %s: %s\n") %
170 (inst.desc or inst.filename,
170 (inst.desc or inst.filename,
171 encoding.strtolocal(inst.strerror)))
171 encoding.strtolocal(inst.strerror)))
172 except error.OutOfBandError as inst:
172 except error.OutOfBandError as inst:
173 if inst.args:
173 if inst.args:
174 msg = _("abort: remote error:\n")
174 msg = _("abort: remote error:\n")
175 else:
175 else:
176 msg = _("abort: remote error\n")
176 msg = _("abort: remote error\n")
177 ui.warn(msg)
177 ui.warn(msg)
178 if inst.args:
178 if inst.args:
179 ui.warn(''.join(inst.args))
179 ui.warn(''.join(inst.args))
180 if inst.hint:
180 if inst.hint:
181 ui.warn('(%s)\n' % inst.hint)
181 ui.warn('(%s)\n' % inst.hint)
182 except error.RepoError as inst:
182 except error.RepoError as inst:
183 ui.warn(_("abort: %s!\n") % inst)
183 ui.warn(_("abort: %s!\n") % inst)
184 if inst.hint:
184 if inst.hint:
185 ui.warn(_("(%s)\n") % inst.hint)
185 ui.warn(_("(%s)\n") % inst.hint)
186 except error.ResponseError as inst:
186 except error.ResponseError as inst:
187 ui.warn(_("abort: %s") % inst.args[0])
187 ui.warn(_("abort: %s") % inst.args[0])
188 if not isinstance(inst.args[1], basestring):
188 if not isinstance(inst.args[1], basestring):
189 ui.warn(" %r\n" % (inst.args[1],))
189 ui.warn(" %r\n" % (inst.args[1],))
190 elif not inst.args[1]:
190 elif not inst.args[1]:
191 ui.warn(_(" empty string\n"))
191 ui.warn(_(" empty string\n"))
192 else:
192 else:
193 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
193 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
194 except error.CensoredNodeError as inst:
194 except error.CensoredNodeError as inst:
195 ui.warn(_("abort: file censored %s!\n") % inst)
195 ui.warn(_("abort: file censored %s!\n") % inst)
196 except error.RevlogError as inst:
196 except error.RevlogError as inst:
197 ui.warn(_("abort: %s!\n") % inst)
197 ui.warn(_("abort: %s!\n") % inst)
198 except error.InterventionRequired as inst:
198 except error.InterventionRequired as inst:
199 ui.warn("%s\n" % inst)
199 ui.warn("%s\n" % inst)
200 if inst.hint:
200 if inst.hint:
201 ui.warn(_("(%s)\n") % inst.hint)
201 ui.warn(_("(%s)\n") % inst.hint)
202 return 1
202 return 1
203 except error.WdirUnsupported:
203 except error.WdirUnsupported:
204 ui.warn(_("abort: working directory revision cannot be specified\n"))
204 ui.warn(_("abort: working directory revision cannot be specified\n"))
205 except error.Abort as inst:
205 except error.Abort as inst:
206 ui.warn(_("abort: %s\n") % inst)
206 ui.warn(_("abort: %s\n") % inst)
207 if inst.hint:
207 if inst.hint:
208 ui.warn(_("(%s)\n") % inst.hint)
208 ui.warn(_("(%s)\n") % inst.hint)
209 except ImportError as inst:
209 except ImportError as inst:
210 ui.warn(_("abort: %s!\n") % inst)
210 ui.warn(_("abort: %s!\n") % inst)
211 m = str(inst).split()[-1]
211 m = str(inst).split()[-1]
212 if m in "mpatch bdiff".split():
212 if m in "mpatch bdiff".split():
213 ui.warn(_("(did you forget to compile extensions?)\n"))
213 ui.warn(_("(did you forget to compile extensions?)\n"))
214 elif m in "zlib".split():
214 elif m in "zlib".split():
215 ui.warn(_("(is your Python install correct?)\n"))
215 ui.warn(_("(is your Python install correct?)\n"))
216 except IOError as inst:
216 except IOError as inst:
217 if util.safehasattr(inst, "code"):
217 if util.safehasattr(inst, "code"):
218 ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
218 ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
219 elif util.safehasattr(inst, "reason"):
219 elif util.safehasattr(inst, "reason"):
220 try: # usually it is in the form (errno, strerror)
220 try: # usually it is in the form (errno, strerror)
221 reason = inst.reason.args[1]
221 reason = inst.reason.args[1]
222 except (AttributeError, IndexError):
222 except (AttributeError, IndexError):
223 # it might be anything, for example a string
223 # it might be anything, for example a string
224 reason = inst.reason
224 reason = inst.reason
225 if isinstance(reason, unicode):
225 if isinstance(reason, unicode):
226 # SSLError of Python 2.7.9 contains a unicode
226 # SSLError of Python 2.7.9 contains a unicode
227 reason = encoding.unitolocal(reason)
227 reason = encoding.unitolocal(reason)
228 ui.warn(_("abort: error: %s\n") % reason)
228 ui.warn(_("abort: error: %s\n") % reason)
229 elif (util.safehasattr(inst, "args")
229 elif (util.safehasattr(inst, "args")
230 and inst.args and inst.args[0] == errno.EPIPE):
230 and inst.args and inst.args[0] == errno.EPIPE):
231 pass
231 pass
232 elif getattr(inst, "strerror", None):
232 elif getattr(inst, "strerror", None):
233 if getattr(inst, "filename", None):
233 if getattr(inst, "filename", None):
234 ui.warn(_("abort: %s: %s\n") % (
234 ui.warn(_("abort: %s: %s\n") % (
235 encoding.strtolocal(inst.strerror), inst.filename))
235 encoding.strtolocal(inst.strerror), inst.filename))
236 else:
236 else:
237 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
237 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
238 else:
238 else:
239 raise
239 raise
240 except OSError as inst:
240 except OSError as inst:
241 if getattr(inst, "filename", None) is not None:
241 if getattr(inst, "filename", None) is not None:
242 ui.warn(_("abort: %s: '%s'\n") % (
242 ui.warn(_("abort: %s: '%s'\n") % (
243 encoding.strtolocal(inst.strerror), inst.filename))
243 encoding.strtolocal(inst.strerror), inst.filename))
244 else:
244 else:
245 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
245 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
246 except MemoryError:
246 except MemoryError:
247 ui.warn(_("abort: out of memory\n"))
247 ui.warn(_("abort: out of memory\n"))
248 except SystemExit as inst:
248 except SystemExit as inst:
249 # Commands shouldn't sys.exit directly, but give a return code.
249 # Commands shouldn't sys.exit directly, but give a return code.
250 # Just in case catch this and and pass exit code to caller.
250 # Just in case catch this and and pass exit code to caller.
251 return inst.code
251 return inst.code
252 except socket.error as inst:
252 except socket.error as inst:
253 ui.warn(_("abort: %s\n") % inst.args[-1])
253 ui.warn(_("abort: %s\n") % inst.args[-1])
254
254
255 return -1
255 return -1
256
256
257 def checknewlabel(repo, lbl, kind):
257 def checknewlabel(repo, lbl, kind):
258 # Do not use the "kind" parameter in ui output.
258 # Do not use the "kind" parameter in ui output.
259 # It makes strings difficult to translate.
259 # It makes strings difficult to translate.
260 if lbl in ['tip', '.', 'null']:
260 if lbl in ['tip', '.', 'null']:
261 raise error.Abort(_("the name '%s' is reserved") % lbl)
261 raise error.Abort(_("the name '%s' is reserved") % lbl)
262 for c in (':', '\0', '\n', '\r'):
262 for c in (':', '\0', '\n', '\r'):
263 if c in lbl:
263 if c in lbl:
264 raise error.Abort(_("%r cannot be used in a name") % c)
264 raise error.Abort(_("%r cannot be used in a name") % c)
265 try:
265 try:
266 int(lbl)
266 int(lbl)
267 raise error.Abort(_("cannot use an integer as a name"))
267 raise error.Abort(_("cannot use an integer as a name"))
268 except ValueError:
268 except ValueError:
269 pass
269 pass
270 if lbl.strip() != lbl:
270 if lbl.strip() != lbl:
271 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
271 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
272
272
273 def checkfilename(f):
273 def checkfilename(f):
274 '''Check that the filename f is an acceptable filename for a tracked file'''
274 '''Check that the filename f is an acceptable filename for a tracked file'''
275 if '\r' in f or '\n' in f:
275 if '\r' in f or '\n' in f:
276 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
276 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
277
277
278 def checkportable(ui, f):
278 def checkportable(ui, f):
279 '''Check if filename f is portable and warn or abort depending on config'''
279 '''Check if filename f is portable and warn or abort depending on config'''
280 checkfilename(f)
280 checkfilename(f)
281 abort, warn = checkportabilityalert(ui)
281 abort, warn = checkportabilityalert(ui)
282 if abort or warn:
282 if abort or warn:
283 msg = util.checkwinfilename(f)
283 msg = util.checkwinfilename(f)
284 if msg:
284 if msg:
285 msg = "%s: %s" % (msg, util.shellquote(f))
285 msg = "%s: %s" % (msg, util.shellquote(f))
286 if abort:
286 if abort:
287 raise error.Abort(msg)
287 raise error.Abort(msg)
288 ui.warn(_("warning: %s\n") % msg)
288 ui.warn(_("warning: %s\n") % msg)
289
289
290 def checkportabilityalert(ui):
290 def checkportabilityalert(ui):
291 '''check if the user's config requests nothing, a warning, or abort for
291 '''check if the user's config requests nothing, a warning, or abort for
292 non-portable filenames'''
292 non-portable filenames'''
293 val = ui.config('ui', 'portablefilenames')
293 val = ui.config('ui', 'portablefilenames')
294 lval = val.lower()
294 lval = val.lower()
295 bval = util.parsebool(val)
295 bval = util.parsebool(val)
296 abort = pycompat.iswindows or lval == 'abort'
296 abort = pycompat.iswindows or lval == 'abort'
297 warn = bval or lval == 'warn'
297 warn = bval or lval == 'warn'
298 if bval is None and not (warn or abort or lval == 'ignore'):
298 if bval is None and not (warn or abort or lval == 'ignore'):
299 raise error.ConfigError(
299 raise error.ConfigError(
300 _("ui.portablefilenames value is invalid ('%s')") % val)
300 _("ui.portablefilenames value is invalid ('%s')") % val)
301 return abort, warn
301 return abort, warn
302
302
303 class casecollisionauditor(object):
303 class casecollisionauditor(object):
304 def __init__(self, ui, abort, dirstate):
304 def __init__(self, ui, abort, dirstate):
305 self._ui = ui
305 self._ui = ui
306 self._abort = abort
306 self._abort = abort
307 allfiles = '\0'.join(dirstate._map)
307 allfiles = '\0'.join(dirstate._map)
308 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
308 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
309 self._dirstate = dirstate
309 self._dirstate = dirstate
310 # The purpose of _newfiles is so that we don't complain about
310 # The purpose of _newfiles is so that we don't complain about
311 # case collisions if someone were to call this object with the
311 # case collisions if someone were to call this object with the
312 # same filename twice.
312 # same filename twice.
313 self._newfiles = set()
313 self._newfiles = set()
314
314
315 def __call__(self, f):
315 def __call__(self, f):
316 if f in self._newfiles:
316 if f in self._newfiles:
317 return
317 return
318 fl = encoding.lower(f)
318 fl = encoding.lower(f)
319 if fl in self._loweredfiles and f not in self._dirstate:
319 if fl in self._loweredfiles and f not in self._dirstate:
320 msg = _('possible case-folding collision for %s') % f
320 msg = _('possible case-folding collision for %s') % f
321 if self._abort:
321 if self._abort:
322 raise error.Abort(msg)
322 raise error.Abort(msg)
323 self._ui.warn(_("warning: %s\n") % msg)
323 self._ui.warn(_("warning: %s\n") % msg)
324 self._loweredfiles.add(fl)
324 self._loweredfiles.add(fl)
325 self._newfiles.add(f)
325 self._newfiles.add(f)
326
326
327 def filteredhash(repo, maxrev):
327 def filteredhash(repo, maxrev):
328 """build hash of filtered revisions in the current repoview.
328 """build hash of filtered revisions in the current repoview.
329
329
330 Multiple caches perform up-to-date validation by checking that the
330 Multiple caches perform up-to-date validation by checking that the
331 tiprev and tipnode stored in the cache file match the current repository.
331 tiprev and tipnode stored in the cache file match the current repository.
332 However, this is not sufficient for validating repoviews because the set
332 However, this is not sufficient for validating repoviews because the set
333 of revisions in the view may change without the repository tiprev and
333 of revisions in the view may change without the repository tiprev and
334 tipnode changing.
334 tipnode changing.
335
335
336 This function hashes all the revs filtered from the view and returns
336 This function hashes all the revs filtered from the view and returns
337 that SHA-1 digest.
337 that SHA-1 digest.
338 """
338 """
339 cl = repo.changelog
339 cl = repo.changelog
340 if not cl.filteredrevs:
340 if not cl.filteredrevs:
341 return None
341 return None
342 key = None
342 key = None
343 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
343 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
344 if revs:
344 if revs:
345 s = hashlib.sha1()
345 s = hashlib.sha1()
346 for rev in revs:
346 for rev in revs:
347 s.update('%d;' % rev)
347 s.update('%d;' % rev)
348 key = s.digest()
348 key = s.digest()
349 return key
349 return key
350
350
351 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
351 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
352 '''yield every hg repository under path, always recursively.
352 '''yield every hg repository under path, always recursively.
353 The recurse flag will only control recursion into repo working dirs'''
353 The recurse flag will only control recursion into repo working dirs'''
354 def errhandler(err):
354 def errhandler(err):
355 if err.filename == path:
355 if err.filename == path:
356 raise err
356 raise err
357 samestat = getattr(os.path, 'samestat', None)
357 samestat = getattr(os.path, 'samestat', None)
358 if followsym and samestat is not None:
358 if followsym and samestat is not None:
359 def adddir(dirlst, dirname):
359 def adddir(dirlst, dirname):
360 match = False
360 match = False
361 dirstat = os.stat(dirname)
361 dirstat = os.stat(dirname)
362 for lstdirstat in dirlst:
362 for lstdirstat in dirlst:
363 if samestat(dirstat, lstdirstat):
363 if samestat(dirstat, lstdirstat):
364 match = True
364 match = True
365 break
365 break
366 if not match:
366 if not match:
367 dirlst.append(dirstat)
367 dirlst.append(dirstat)
368 return not match
368 return not match
369 else:
369 else:
370 followsym = False
370 followsym = False
371
371
372 if (seen_dirs is None) and followsym:
372 if (seen_dirs is None) and followsym:
373 seen_dirs = []
373 seen_dirs = []
374 adddir(seen_dirs, path)
374 adddir(seen_dirs, path)
375 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
375 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
376 dirs.sort()
376 dirs.sort()
377 if '.hg' in dirs:
377 if '.hg' in dirs:
378 yield root # found a repository
378 yield root # found a repository
379 qroot = os.path.join(root, '.hg', 'patches')
379 qroot = os.path.join(root, '.hg', 'patches')
380 if os.path.isdir(os.path.join(qroot, '.hg')):
380 if os.path.isdir(os.path.join(qroot, '.hg')):
381 yield qroot # we have a patch queue repo here
381 yield qroot # we have a patch queue repo here
382 if recurse:
382 if recurse:
383 # avoid recursing inside the .hg directory
383 # avoid recursing inside the .hg directory
384 dirs.remove('.hg')
384 dirs.remove('.hg')
385 else:
385 else:
386 dirs[:] = [] # don't descend further
386 dirs[:] = [] # don't descend further
387 elif followsym:
387 elif followsym:
388 newdirs = []
388 newdirs = []
389 for d in dirs:
389 for d in dirs:
390 fname = os.path.join(root, d)
390 fname = os.path.join(root, d)
391 if adddir(seen_dirs, fname):
391 if adddir(seen_dirs, fname):
392 if os.path.islink(fname):
392 if os.path.islink(fname):
393 for hgname in walkrepos(fname, True, seen_dirs):
393 for hgname in walkrepos(fname, True, seen_dirs):
394 yield hgname
394 yield hgname
395 else:
395 else:
396 newdirs.append(d)
396 newdirs.append(d)
397 dirs[:] = newdirs
397 dirs[:] = newdirs
398
398
399 def binnode(ctx):
399 def binnode(ctx):
400 """Return binary node id for a given basectx"""
400 """Return binary node id for a given basectx"""
401 node = ctx.node()
401 node = ctx.node()
402 if node is None:
402 if node is None:
403 return wdirid
403 return wdirid
404 return node
404 return node
405
405
406 def intrev(ctx):
406 def intrev(ctx):
407 """Return integer for a given basectx that can be used in comparison or
407 """Return integer for a given basectx that can be used in comparison or
408 arithmetic operation"""
408 arithmetic operation"""
409 rev = ctx.rev()
409 rev = ctx.rev()
410 if rev is None:
410 if rev is None:
411 return wdirrev
411 return wdirrev
412 return rev
412 return rev
413
413
414 def formatchangeid(ctx):
414 def formatchangeid(ctx):
415 """Format changectx as '{rev}:{node|formatnode}', which is the default
415 """Format changectx as '{rev}:{node|formatnode}', which is the default
416 template provided by logcmdutil.changesettemplater"""
416 template provided by logcmdutil.changesettemplater"""
417 repo = ctx.repo()
417 repo = ctx.repo()
418 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
418 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
419
419
420 def formatrevnode(ui, rev, node):
420 def formatrevnode(ui, rev, node):
421 """Format given revision and node depending on the current verbosity"""
421 """Format given revision and node depending on the current verbosity"""
422 if ui.debugflag:
422 if ui.debugflag:
423 hexfunc = hex
423 hexfunc = hex
424 else:
424 else:
425 hexfunc = short
425 hexfunc = short
426 return '%d:%s' % (rev, hexfunc(node))
426 return '%d:%s' % (rev, hexfunc(node))
427
427
428 def revsingle(repo, revspec, default='.', localalias=None):
428 def revsingle(repo, revspec, default='.', localalias=None):
429 if not revspec and revspec != 0:
429 if not revspec and revspec != 0:
430 return repo[default]
430 return repo[default]
431
431
432 l = revrange(repo, [revspec], localalias=localalias)
432 l = revrange(repo, [revspec], localalias=localalias)
433 if not l:
433 if not l:
434 raise error.Abort(_('empty revision set'))
434 raise error.Abort(_('empty revision set'))
435 return repo[l.last()]
435 return repo[l.last()]
436
436
437 def _pairspec(revspec):
437 def _pairspec(revspec):
438 tree = revsetlang.parse(revspec)
438 tree = revsetlang.parse(revspec)
439 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
439 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
440
440
441 def revpair(repo, revs):
441 def revpair(repo, revs):
442 if not revs:
442 if not revs:
443 return repo.dirstate.p1(), None
443 return repo.dirstate.p1(), None
444
444
445 l = revrange(repo, revs)
445 l = revrange(repo, revs)
446
446
447 if not l:
447 if not l:
448 first = second = None
448 first = second = None
449 elif l.isascending():
449 elif l.isascending():
450 first = l.min()
450 first = l.min()
451 second = l.max()
451 second = l.max()
452 elif l.isdescending():
452 elif l.isdescending():
453 first = l.max()
453 first = l.max()
454 second = l.min()
454 second = l.min()
455 else:
455 else:
456 first = l.first()
456 first = l.first()
457 second = l.last()
457 second = l.last()
458
458
459 if first is None:
459 if first is None:
460 raise error.Abort(_('empty revision range'))
460 raise error.Abort(_('empty revision range'))
461 if (first == second and len(revs) >= 2
461 if (first == second and len(revs) >= 2
462 and not all(revrange(repo, [r]) for r in revs)):
462 and not all(revrange(repo, [r]) for r in revs)):
463 raise error.Abort(_('empty revision on one side of range'))
463 raise error.Abort(_('empty revision on one side of range'))
464
464
465 # if top-level is range expression, the result must always be a pair
465 # if top-level is range expression, the result must always be a pair
466 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
466 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
467 return repo.lookup(first), None
467 return repo.lookup(first), None
468
468
469 return repo.lookup(first), repo.lookup(second)
469 return repo.lookup(first), repo.lookup(second)
470
470
471 def revrange(repo, specs, localalias=None):
471 def revrange(repo, specs, localalias=None):
472 """Execute 1 to many revsets and return the union.
472 """Execute 1 to many revsets and return the union.
473
473
474 This is the preferred mechanism for executing revsets using user-specified
474 This is the preferred mechanism for executing revsets using user-specified
475 config options, such as revset aliases.
475 config options, such as revset aliases.
476
476
477 The revsets specified by ``specs`` will be executed via a chained ``OR``
477 The revsets specified by ``specs`` will be executed via a chained ``OR``
478 expression. If ``specs`` is empty, an empty result is returned.
478 expression. If ``specs`` is empty, an empty result is returned.
479
479
480 ``specs`` can contain integers, in which case they are assumed to be
480 ``specs`` can contain integers, in which case they are assumed to be
481 revision numbers.
481 revision numbers.
482
482
483 It is assumed the revsets are already formatted. If you have arguments
483 It is assumed the revsets are already formatted. If you have arguments
484 that need to be expanded in the revset, call ``revsetlang.formatspec()``
484 that need to be expanded in the revset, call ``revsetlang.formatspec()``
485 and pass the result as an element of ``specs``.
485 and pass the result as an element of ``specs``.
486
486
487 Specifying a single revset is allowed.
487 Specifying a single revset is allowed.
488
488
489 Returns a ``revset.abstractsmartset`` which is a list-like interface over
489 Returns a ``revset.abstractsmartset`` which is a list-like interface over
490 integer revisions.
490 integer revisions.
491 """
491 """
492 allspecs = []
492 allspecs = []
493 for spec in specs:
493 for spec in specs:
494 if isinstance(spec, int):
494 if isinstance(spec, int):
495 spec = revsetlang.formatspec('rev(%d)', spec)
495 spec = revsetlang.formatspec('rev(%d)', spec)
496 allspecs.append(spec)
496 allspecs.append(spec)
497 return repo.anyrevs(allspecs, user=True, localalias=localalias)
497 return repo.anyrevs(allspecs, user=True, localalias=localalias)
498
498
499 def meaningfulparents(repo, ctx):
499 def meaningfulparents(repo, ctx):
500 """Return list of meaningful (or all if debug) parentrevs for rev.
500 """Return list of meaningful (or all if debug) parentrevs for rev.
501
501
502 For merges (two non-nullrev revisions) both parents are meaningful.
502 For merges (two non-nullrev revisions) both parents are meaningful.
503 Otherwise the first parent revision is considered meaningful if it
503 Otherwise the first parent revision is considered meaningful if it
504 is not the preceding revision.
504 is not the preceding revision.
505 """
505 """
506 parents = ctx.parents()
506 parents = ctx.parents()
507 if len(parents) > 1:
507 if len(parents) > 1:
508 return parents
508 return parents
509 if repo.ui.debugflag:
509 if repo.ui.debugflag:
510 return [parents[0], repo['null']]
510 return [parents[0], repo['null']]
511 if parents[0].rev() >= intrev(ctx) - 1:
511 if parents[0].rev() >= intrev(ctx) - 1:
512 return []
512 return []
513 return parents
513 return parents
514
514
515 def expandpats(pats):
515 def expandpats(pats):
516 '''Expand bare globs when running on windows.
516 '''Expand bare globs when running on windows.
517 On posix we assume it already has already been done by sh.'''
517 On posix we assume it already has already been done by sh.'''
518 if not util.expandglobs:
518 if not util.expandglobs:
519 return list(pats)
519 return list(pats)
520 ret = []
520 ret = []
521 for kindpat in pats:
521 for kindpat in pats:
522 kind, pat = matchmod._patsplit(kindpat, None)
522 kind, pat = matchmod._patsplit(kindpat, None)
523 if kind is None:
523 if kind is None:
524 try:
524 try:
525 globbed = glob.glob(pat)
525 globbed = glob.glob(pat)
526 except re.error:
526 except re.error:
527 globbed = [pat]
527 globbed = [pat]
528 if globbed:
528 if globbed:
529 ret.extend(globbed)
529 ret.extend(globbed)
530 continue
530 continue
531 ret.append(kindpat)
531 ret.append(kindpat)
532 return ret
532 return ret
533
533
534 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
534 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
535 badfn=None):
535 badfn=None):
536 '''Return a matcher and the patterns that were used.
536 '''Return a matcher and the patterns that were used.
537 The matcher will warn about bad matches, unless an alternate badfn callback
537 The matcher will warn about bad matches, unless an alternate badfn callback
538 is provided.'''
538 is provided.'''
539 if pats == ("",):
539 if pats == ("",):
540 pats = []
540 pats = []
541 if opts is None:
541 if opts is None:
542 opts = {}
542 opts = {}
543 if not globbed and default == 'relpath':
543 if not globbed and default == 'relpath':
544 pats = expandpats(pats or [])
544 pats = expandpats(pats or [])
545
545
546 def bad(f, msg):
546 def bad(f, msg):
547 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
547 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
548
548
549 if badfn is None:
549 if badfn is None:
550 badfn = bad
550 badfn = bad
551
551
552 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
552 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
553 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
553 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
554
554
555 if m.always():
555 if m.always():
556 pats = []
556 pats = []
557 return m, pats
557 return m, pats
558
558
559 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
559 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
560 badfn=None):
560 badfn=None):
561 '''Return a matcher that will warn about bad matches.'''
561 '''Return a matcher that will warn about bad matches.'''
562 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
562 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
563
563
564 def matchall(repo):
564 def matchall(repo):
565 '''Return a matcher that will efficiently match everything.'''
565 '''Return a matcher that will efficiently match everything.'''
566 return matchmod.always(repo.root, repo.getcwd())
566 return matchmod.always(repo.root, repo.getcwd())
567
567
568 def matchfiles(repo, files, badfn=None):
568 def matchfiles(repo, files, badfn=None):
569 '''Return a matcher that will efficiently match exactly these files.'''
569 '''Return a matcher that will efficiently match exactly these files.'''
570 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
570 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
571
571
572 def parsefollowlinespattern(repo, rev, pat, msg):
572 def parsefollowlinespattern(repo, rev, pat, msg):
573 """Return a file name from `pat` pattern suitable for usage in followlines
573 """Return a file name from `pat` pattern suitable for usage in followlines
574 logic.
574 logic.
575 """
575 """
576 if not matchmod.patkind(pat):
576 if not matchmod.patkind(pat):
577 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
577 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
578 else:
578 else:
579 ctx = repo[rev]
579 ctx = repo[rev]
580 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
580 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
581 files = [f for f in ctx if m(f)]
581 files = [f for f in ctx if m(f)]
582 if len(files) != 1:
582 if len(files) != 1:
583 raise error.ParseError(msg)
583 raise error.ParseError(msg)
584 return files[0]
584 return files[0]
585
585
586 def origpath(ui, repo, filepath):
586 def origpath(ui, repo, filepath):
587 '''customize where .orig files are created
587 '''customize where .orig files are created
588
588
589 Fetch user defined path from config file: [ui] origbackuppath = <path>
589 Fetch user defined path from config file: [ui] origbackuppath = <path>
590 Fall back to default (filepath with .orig suffix) if not specified
590 Fall back to default (filepath with .orig suffix) if not specified
591 '''
591 '''
592 origbackuppath = ui.config('ui', 'origbackuppath')
592 origbackuppath = ui.config('ui', 'origbackuppath')
593 if not origbackuppath:
593 if not origbackuppath:
594 return filepath + ".orig"
594 return filepath + ".orig"
595
595
596 # Convert filepath from an absolute path into a path inside the repo.
596 # Convert filepath from an absolute path into a path inside the repo.
597 filepathfromroot = util.normpath(os.path.relpath(filepath,
597 filepathfromroot = util.normpath(os.path.relpath(filepath,
598 start=repo.root))
598 start=repo.root))
599
599
600 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
600 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
601 origbackupdir = origvfs.dirname(filepathfromroot)
601 origbackupdir = origvfs.dirname(filepathfromroot)
602 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
602 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
603 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
603 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
604
604
605 # Remove any files that conflict with the backup file's path
605 # Remove any files that conflict with the backup file's path
606 for f in reversed(list(util.finddirs(filepathfromroot))):
606 for f in reversed(list(util.finddirs(filepathfromroot))):
607 if origvfs.isfileorlink(f):
607 if origvfs.isfileorlink(f):
608 ui.note(_('removing conflicting file: %s\n')
608 ui.note(_('removing conflicting file: %s\n')
609 % origvfs.join(f))
609 % origvfs.join(f))
610 origvfs.unlink(f)
610 origvfs.unlink(f)
611 break
611 break
612
612
613 origvfs.makedirs(origbackupdir)
613 origvfs.makedirs(origbackupdir)
614
614
615 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
615 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
616 ui.note(_('removing conflicting directory: %s\n')
616 ui.note(_('removing conflicting directory: %s\n')
617 % origvfs.join(filepathfromroot))
617 % origvfs.join(filepathfromroot))
618 origvfs.rmtree(filepathfromroot, forcibly=True)
618 origvfs.rmtree(filepathfromroot, forcibly=True)
619
619
620 return origvfs.join(filepathfromroot)
620 return origvfs.join(filepathfromroot)
621
621
622 class _containsnode(object):
622 class _containsnode(object):
623 """proxy __contains__(node) to container.__contains__ which accepts revs"""
623 """proxy __contains__(node) to container.__contains__ which accepts revs"""
624
624
625 def __init__(self, repo, revcontainer):
625 def __init__(self, repo, revcontainer):
626 self._torev = repo.changelog.rev
626 self._torev = repo.changelog.rev
627 self._revcontains = revcontainer.__contains__
627 self._revcontains = revcontainer.__contains__
628
628
629 def __contains__(self, node):
629 def __contains__(self, node):
630 return self._revcontains(self._torev(node))
630 return self._revcontains(self._torev(node))
631
631
632 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
632 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
633 """do common cleanups when old nodes are replaced by new nodes
633 """do common cleanups when old nodes are replaced by new nodes
634
634
635 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
635 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
636 (we might also want to move working directory parent in the future)
636 (we might also want to move working directory parent in the future)
637
637
638 By default, bookmark moves are calculated automatically from 'replacements',
638 By default, bookmark moves are calculated automatically from 'replacements',
639 but 'moves' can be used to override that. Also, 'moves' may include
639 but 'moves' can be used to override that. Also, 'moves' may include
640 additional bookmark moves that should not have associated obsmarkers.
640 additional bookmark moves that should not have associated obsmarkers.
641
641
642 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
642 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
643 have replacements. operation is a string, like "rebase".
643 have replacements. operation is a string, like "rebase".
644
644
645 metadata is dictionary containing metadata to be stored in obsmarker if
645 metadata is dictionary containing metadata to be stored in obsmarker if
646 obsolescence is enabled.
646 obsolescence is enabled.
647 """
647 """
648 if not replacements and not moves:
648 if not replacements and not moves:
649 return
649 return
650
650
651 # translate mapping's other forms
651 # translate mapping's other forms
652 if not util.safehasattr(replacements, 'items'):
652 if not util.safehasattr(replacements, 'items'):
653 replacements = {n: () for n in replacements}
653 replacements = {n: () for n in replacements}
654
654
655 # Calculate bookmark movements
655 # Calculate bookmark movements
656 if moves is None:
656 if moves is None:
657 moves = {}
657 moves = {}
658 # Unfiltered repo is needed since nodes in replacements might be hidden.
658 # Unfiltered repo is needed since nodes in replacements might be hidden.
659 unfi = repo.unfiltered()
659 unfi = repo.unfiltered()
660 for oldnode, newnodes in replacements.items():
660 for oldnode, newnodes in replacements.items():
661 if oldnode in moves:
661 if oldnode in moves:
662 continue
662 continue
663 if len(newnodes) > 1:
663 if len(newnodes) > 1:
664 # usually a split, take the one with biggest rev number
664 # usually a split, take the one with biggest rev number
665 newnode = next(unfi.set('max(%ln)', newnodes)).node()
665 newnode = next(unfi.set('max(%ln)', newnodes)).node()
666 elif len(newnodes) == 0:
666 elif len(newnodes) == 0:
667 # move bookmark backwards
667 # move bookmark backwards
668 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
668 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
669 list(replacements)))
669 list(replacements)))
670 if roots:
670 if roots:
671 newnode = roots[0].node()
671 newnode = roots[0].node()
672 else:
672 else:
673 newnode = nullid
673 newnode = nullid
674 else:
674 else:
675 newnode = newnodes[0]
675 newnode = newnodes[0]
676 moves[oldnode] = newnode
676 moves[oldnode] = newnode
677
677
678 with repo.transaction('cleanup') as tr:
678 with repo.transaction('cleanup') as tr:
679 # Move bookmarks
679 # Move bookmarks
680 bmarks = repo._bookmarks
680 bmarks = repo._bookmarks
681 bmarkchanges = []
681 bmarkchanges = []
682 allnewnodes = [n for ns in replacements.values() for n in ns]
682 allnewnodes = [n for ns in replacements.values() for n in ns]
683 for oldnode, newnode in moves.items():
683 for oldnode, newnode in moves.items():
684 oldbmarks = repo.nodebookmarks(oldnode)
684 oldbmarks = repo.nodebookmarks(oldnode)
685 if not oldbmarks:
685 if not oldbmarks:
686 continue
686 continue
687 from . import bookmarks # avoid import cycle
687 from . import bookmarks # avoid import cycle
688 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
688 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
689 (oldbmarks, hex(oldnode), hex(newnode)))
689 (oldbmarks, hex(oldnode), hex(newnode)))
690 # Delete divergent bookmarks being parents of related newnodes
690 # Delete divergent bookmarks being parents of related newnodes
691 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
691 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
692 allnewnodes, newnode, oldnode)
692 allnewnodes, newnode, oldnode)
693 deletenodes = _containsnode(repo, deleterevs)
693 deletenodes = _containsnode(repo, deleterevs)
694 for name in oldbmarks:
694 for name in oldbmarks:
695 bmarkchanges.append((name, newnode))
695 bmarkchanges.append((name, newnode))
696 for b in bookmarks.divergent2delete(repo, deletenodes, name):
696 for b in bookmarks.divergent2delete(repo, deletenodes, name):
697 bmarkchanges.append((b, None))
697 bmarkchanges.append((b, None))
698
698
699 if bmarkchanges:
699 if bmarkchanges:
700 bmarks.applychanges(repo, tr, bmarkchanges)
700 bmarks.applychanges(repo, tr, bmarkchanges)
701
701
702 # Obsolete or strip nodes
702 # Obsolete or strip nodes
703 if obsolete.isenabled(repo, obsolete.createmarkersopt):
703 if obsolete.isenabled(repo, obsolete.createmarkersopt):
704 # If a node is already obsoleted, and we want to obsolete it
704 # If a node is already obsoleted, and we want to obsolete it
705 # without a successor, skip that obssolete request since it's
705 # without a successor, skip that obssolete request since it's
706 # unnecessary. That's the "if s or not isobs(n)" check below.
706 # unnecessary. That's the "if s or not isobs(n)" check below.
707 # Also sort the node in topology order, that might be useful for
707 # Also sort the node in topology order, that might be useful for
708 # some obsstore logic.
708 # some obsstore logic.
709 # NOTE: the filtering and sorting might belong to createmarkers.
709 # NOTE: the filtering and sorting might belong to createmarkers.
710 isobs = unfi.obsstore.successors.__contains__
710 isobs = unfi.obsstore.successors.__contains__
711 torev = unfi.changelog.rev
711 torev = unfi.changelog.rev
712 sortfunc = lambda ns: torev(ns[0])
712 sortfunc = lambda ns: torev(ns[0])
713 rels = [(unfi[n], tuple(unfi[m] for m in s))
713 rels = [(unfi[n], tuple(unfi[m] for m in s))
714 for n, s in sorted(replacements.items(), key=sortfunc)
714 for n, s in sorted(replacements.items(), key=sortfunc)
715 if s or not isobs(n)]
715 if s or not isobs(n)]
716 if rels:
716 if rels:
717 obsolete.createmarkers(repo, rels, operation=operation,
717 obsolete.createmarkers(repo, rels, operation=operation,
718 metadata=metadata)
718 metadata=metadata)
719 else:
719 else:
720 from . import repair # avoid import cycle
720 from . import repair # avoid import cycle
721 tostrip = list(replacements)
721 tostrip = list(replacements)
722 if tostrip:
722 if tostrip:
723 repair.delayedstrip(repo.ui, repo, tostrip, operation)
723 repair.delayedstrip(repo.ui, repo, tostrip, operation)
724
724
725 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
725 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
726 if opts is None:
726 if opts is None:
727 opts = {}
727 opts = {}
728 m = matcher
728 m = matcher
729 if dry_run is None:
729 if dry_run is None:
730 dry_run = opts.get('dry_run')
730 dry_run = opts.get('dry_run')
731 if similarity is None:
731 if similarity is None:
732 similarity = float(opts.get('similarity') or 0)
732 similarity = float(opts.get('similarity') or 0)
733
733
734 ret = 0
734 ret = 0
735 join = lambda f: os.path.join(prefix, f)
735 join = lambda f: os.path.join(prefix, f)
736
736
737 wctx = repo[None]
737 wctx = repo[None]
738 for subpath in sorted(wctx.substate):
738 for subpath in sorted(wctx.substate):
739 submatch = matchmod.subdirmatcher(subpath, m)
739 submatch = matchmod.subdirmatcher(subpath, m)
740 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
740 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
741 sub = wctx.sub(subpath)
741 sub = wctx.sub(subpath)
742 try:
742 try:
743 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
743 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
744 ret = 1
744 ret = 1
745 except error.LookupError:
745 except error.LookupError:
746 repo.ui.status(_("skipping missing subrepository: %s\n")
746 repo.ui.status(_("skipping missing subrepository: %s\n")
747 % join(subpath))
747 % join(subpath))
748
748
749 rejected = []
749 rejected = []
750 def badfn(f, msg):
750 def badfn(f, msg):
751 if f in m.files():
751 if f in m.files():
752 m.bad(f, msg)
752 m.bad(f, msg)
753 rejected.append(f)
753 rejected.append(f)
754
754
755 badmatch = matchmod.badmatch(m, badfn)
755 badmatch = matchmod.badmatch(m, badfn)
756 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
756 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
757 badmatch)
757 badmatch)
758
758
759 unknownset = set(unknown + forgotten)
759 unknownset = set(unknown + forgotten)
760 toprint = unknownset.copy()
760 toprint = unknownset.copy()
761 toprint.update(deleted)
761 toprint.update(deleted)
762 for abs in sorted(toprint):
762 for abs in sorted(toprint):
763 if repo.ui.verbose or not m.exact(abs):
763 if repo.ui.verbose or not m.exact(abs):
764 if abs in unknownset:
764 if abs in unknownset:
765 status = _('adding %s\n') % m.uipath(abs)
765 status = _('adding %s\n') % m.uipath(abs)
766 else:
766 else:
767 status = _('removing %s\n') % m.uipath(abs)
767 status = _('removing %s\n') % m.uipath(abs)
768 repo.ui.status(status)
768 repo.ui.status(status)
769
769
770 renames = _findrenames(repo, m, added + unknown, removed + deleted,
770 renames = _findrenames(repo, m, added + unknown, removed + deleted,
771 similarity)
771 similarity)
772
772
773 if not dry_run:
773 if not dry_run:
774 _markchanges(repo, unknown + forgotten, deleted, renames)
774 _markchanges(repo, unknown + forgotten, deleted, renames)
775
775
776 for f in rejected:
776 for f in rejected:
777 if f in m.files():
777 if f in m.files():
778 return 1
778 return 1
779 return ret
779 return ret
780
780
781 def marktouched(repo, files, similarity=0.0):
781 def marktouched(repo, files, similarity=0.0):
782 '''Assert that files have somehow been operated upon. files are relative to
782 '''Assert that files have somehow been operated upon. files are relative to
783 the repo root.'''
783 the repo root.'''
784 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
784 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
785 rejected = []
785 rejected = []
786
786
787 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
787 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
788
788
789 if repo.ui.verbose:
789 if repo.ui.verbose:
790 unknownset = set(unknown + forgotten)
790 unknownset = set(unknown + forgotten)
791 toprint = unknownset.copy()
791 toprint = unknownset.copy()
792 toprint.update(deleted)
792 toprint.update(deleted)
793 for abs in sorted(toprint):
793 for abs in sorted(toprint):
794 if abs in unknownset:
794 if abs in unknownset:
795 status = _('adding %s\n') % abs
795 status = _('adding %s\n') % abs
796 else:
796 else:
797 status = _('removing %s\n') % abs
797 status = _('removing %s\n') % abs
798 repo.ui.status(status)
798 repo.ui.status(status)
799
799
800 renames = _findrenames(repo, m, added + unknown, removed + deleted,
800 renames = _findrenames(repo, m, added + unknown, removed + deleted,
801 similarity)
801 similarity)
802
802
803 _markchanges(repo, unknown + forgotten, deleted, renames)
803 _markchanges(repo, unknown + forgotten, deleted, renames)
804
804
805 for f in rejected:
805 for f in rejected:
806 if f in m.files():
806 if f in m.files():
807 return 1
807 return 1
808 return 0
808 return 0
809
809
810 def _interestingfiles(repo, matcher):
810 def _interestingfiles(repo, matcher):
811 '''Walk dirstate with matcher, looking for files that addremove would care
811 '''Walk dirstate with matcher, looking for files that addremove would care
812 about.
812 about.
813
813
814 This is different from dirstate.status because it doesn't care about
814 This is different from dirstate.status because it doesn't care about
815 whether files are modified or clean.'''
815 whether files are modified or clean.'''
816 added, unknown, deleted, removed, forgotten = [], [], [], [], []
816 added, unknown, deleted, removed, forgotten = [], [], [], [], []
817 audit_path = pathutil.pathauditor(repo.root, cached=True)
817 audit_path = pathutil.pathauditor(repo.root, cached=True)
818
818
819 ctx = repo[None]
819 ctx = repo[None]
820 dirstate = repo.dirstate
820 dirstate = repo.dirstate
821 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
821 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
822 unknown=True, ignored=False, full=False)
822 unknown=True, ignored=False, full=False)
823 for abs, st in walkresults.iteritems():
823 for abs, st in walkresults.iteritems():
824 dstate = dirstate[abs]
824 dstate = dirstate[abs]
825 if dstate == '?' and audit_path.check(abs):
825 if dstate == '?' and audit_path.check(abs):
826 unknown.append(abs)
826 unknown.append(abs)
827 elif dstate != 'r' and not st:
827 elif dstate != 'r' and not st:
828 deleted.append(abs)
828 deleted.append(abs)
829 elif dstate == 'r' and st:
829 elif dstate == 'r' and st:
830 forgotten.append(abs)
830 forgotten.append(abs)
831 # for finding renames
831 # for finding renames
832 elif dstate == 'r' and not st:
832 elif dstate == 'r' and not st:
833 removed.append(abs)
833 removed.append(abs)
834 elif dstate == 'a':
834 elif dstate == 'a':
835 added.append(abs)
835 added.append(abs)
836
836
837 return added, unknown, deleted, removed, forgotten
837 return added, unknown, deleted, removed, forgotten
838
838
839 def _findrenames(repo, matcher, added, removed, similarity):
839 def _findrenames(repo, matcher, added, removed, similarity):
840 '''Find renames from removed files to added ones.'''
840 '''Find renames from removed files to added ones.'''
841 renames = {}
841 renames = {}
842 if similarity > 0:
842 if similarity > 0:
843 for old, new, score in similar.findrenames(repo, added, removed,
843 for old, new, score in similar.findrenames(repo, added, removed,
844 similarity):
844 similarity):
845 if (repo.ui.verbose or not matcher.exact(old)
845 if (repo.ui.verbose or not matcher.exact(old)
846 or not matcher.exact(new)):
846 or not matcher.exact(new)):
847 repo.ui.status(_('recording removal of %s as rename to %s '
847 repo.ui.status(_('recording removal of %s as rename to %s '
848 '(%d%% similar)\n') %
848 '(%d%% similar)\n') %
849 (matcher.rel(old), matcher.rel(new),
849 (matcher.rel(old), matcher.rel(new),
850 score * 100))
850 score * 100))
851 renames[new] = old
851 renames[new] = old
852 return renames
852 return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons, it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            if not r or not r[0:1].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
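
# Why the check above spells r[0:1] rather than r[0] (the point of this
# commit): on Python 3, indexing bytes yields an int, which has no
# isalnum(), while a one-byte slice stays bytes on both Python 2 and 3.
# A standalone illustration (not part of scmutil itself):
#
#   r = b'dotencode'
#   r[0:1]    # b'd' on Python 2 and 3; b'd'.isalnum() -> True
#   r[0]      # 'd' on Python 2, but 100 (an int) on Python 3, where
#             # r[0].isalnum() would raise AttributeError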

def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    '''A property-like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomically renames or appends to files under .hg, so to
    ensure the cache is reliable we need the filesystem to be able to tell us
    if a file has been replaced. If it can't, we fall back to recreating the
    object on every call (essentially the same behavior as propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
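
# A hedged usage sketch (not from this file): localrepository subclasses
# filecache so that join() resolves names against its vfs; the host class
# only needs a _filecache dict. 'myrepo' and 'parsebookmarks' are
# hypothetical names used for illustration.
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class myrepo(object):
#       def __init__(self, vfs):
#           self.vfs = vfs
#           self._filecache = {}
#
#       @repofilecache('bookmarks')
#       def bookmarks(self):
#           # recomputed only when .hg/bookmarks changes on disk
#           return parsebookmarks(self.vfs)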

def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
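
# Illustrative extdata configuration (the 'ticket' source name and its
# data file are made up; the record format follows the docstring above):
#
#   [extdata]
#   ticket = shell:cat .hg/ticketmap       # run relative to repo.root
#
# with .hg/ticketmap containing "revspec value" records such as:
#
#   3de5eca88c00 in-progress
#   default landed
#
# extdatasource(repo, 'ticket') would then map each locally-known
# revision to its string, silently skipping unknown ones.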

def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
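
# Hedged usage sketch ('update-hook' is a made-up command): while we hold
# the wlock, the child process sees HG_WLOCK_LOCKER in its environment
# (set by _locksub above), so a cooperating hg subprocess can reuse the
# lock instead of blocking on it.
#
#   with repo.wlock():
#       rc = wlocksub(repo, 'update-hook')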

def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as is, not in key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
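
# Round-trip sketch (the 'lastsync' name and values are made up for
# illustration; keys obey the constraints checked in write() above):
#
#   skv = simplekeyvaluefile(repo.vfs, 'lastsync')
#   skv.write({'source': 'default', 'rev': '3de5eca88c00'},
#             firstline='version 1')
#   skv.read(firstlinenonkeyval=True)
#   # -> {'__firstline': 'version 1', 'source': 'default',
#   #     'rev': '3de5eca88c00'}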

_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before
# the command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                           filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
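
# Hedged illustration (the wiring is simplified; in core this is hooked
# up when a transaction is opened): for a pull that adds revisions and
# obsoletes others, the post-close callbacks above would print e.g.:
#
#   new changesets 3de5eca88c00:dcbb326fdec2
#   obsoleted 2 changesets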

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
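
# The direct-access behavior above is gated on experimental config; a
# hedged hgrc example (option names taken from the checks in this file):
#
#   [experimental]
#   directaccess = True
#   directaccess.revnums = True   # also allow hidden revision numbers
#
# With that set, a command such as 'hg export <hidden-hash>' can resolve
# the hidden changeset, emitting the warning above when hiddentype is
# 'warn'.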

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs