##// END OF EJS Templates
scmutil: factor out building of transaction summary callback...
Denis Laxalde -
r34621:b799f116 default
parent child Browse files
Show More
@@ -1,1218 +1,1228 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 if pycompat.osname == 'nt':
44 if pycompat.osname == 'nt':
45 from . import scmwindows as scmplatform
45 from . import scmwindows as scmplatform
46 else:
46 else:
47 from . import scmposix as scmplatform
47 from . import scmposix as scmplatform
48
48
49 termsize = scmplatform.termsize
49 termsize = scmplatform.termsize
50
50
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Keep the field order stable: it is relied upon by the index-based
        # property accessors below and by tuple-unpacking callers.
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map every subpath to the context it should be read from.  ctx1 wins
    # on overlap; the ctx2 entries matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subpaths that exist only in ctx2 are handled separately below.
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret and not extinct; these explain
    # why "no changes" may be surprising to the user.
    secretlist = []
    for n in excluded or ():
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print the traceback (when ui.traceback is enabled) before the
            # outer handlers turn the exception into a message + exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # args[1] may be a non-string payload from the remote; show its repr.
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the one handled case that is not an
        # error: exit with 1 instead of the generic -1 below.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # Last word of the message is the module that failed to import.
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like: has an HTTP status code.
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-like: unwrap the underlying reason.
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped to a pager that quit): ignore.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # Not a shape of IOError we know how to report; re-raise.
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not usable as a new label (bookmark, branch, ...).

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        # Not an integer: acceptable.
        return
    raise error.Abort(_("cannot use an integer as a name"))
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    # On Windows non-portable names always abort; elsewhere only on 'abort'.
    abort = lval == 'abort' or pycompat.osname == 'nt'
    warn = bval or lval == 'warn'
    unrecognized = bval is None and not (warn or abort or lval == 'ignore')
    if unrecognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
class casecollisionauditor(object):
    """Detect new filenames that case-fold onto already-tracked ones.

    Calling the instance with a filename warns (or aborts, per the
    constructor flag) on a possible case-folding collision.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        self._dirstate = dirstate
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if f not in self._dirstate and fl in self._loweredfiles:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    digester = hashlib.sha1()
    for rev in revs:
        digester.update('%d;' % rev)
    return digester.digest()
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; errors deeper in
        # the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst and return True if it was not
            # already present (by samestat identity).  This deduplicates
            # directories reached via different symlinks and prevents
            # infinite loops on symlink cycles.
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat we cannot detect cycles, so never follow symlinks.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # Sort for deterministic traversal order.
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk the symlink target via a nested walkrepos so
                        # seen_dirs keeps deduplicating across the recursion.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # In-place assignment prunes os.walk's descent (topdown=True).
            dirs[:] = newdirs
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no node; substitute wdirid.
    node = ctx.node()
    return wdirid if node is None else node
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has rev None; substitute wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hex hash in debug mode, abbreviated hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single context, falling back to default.

    Note: revision 0 is a valid revspec even though it is falsy.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
def _pairspec(revspec):
    # True if the top-level revset operator is a range expression.
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in rangeops
def revpair(repo, revs):
    """Resolve user-supplied revs to a (node, node-or-None) pair."""
    if not revs:
        return repo.dirstate.p1(), None

    revlist = revrange(repo, revs)

    # Pick the endpoints of the resolved set; exploit ordering when known.
    if not revlist:
        first = second = None
    elif revlist.isascending():
        first, second = revlist.min(), revlist.max()
    elif revlist.isdescending():
        first, second = revlist.max(), revlist.min()
    else:
        first, second = revlist.first(), revlist.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are wrapped as rev(N) revsets; strings pass through.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # Merge: both parents are always meaningful.
        return parents
    if repo.ui.debugflag:
        # Debug output shows the null parent explicitly.
        return [parents[0], repo['null']]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # Parent is the immediately preceding revision: not meaningful.
        return []
    return parents
def expandpats(pats):
    '''Expand bare glob patterns (Windows only).

    On POSIX the shell has already performed glob expansion, so the
    input is returned as a plain list.  Patterns carrying an explicit
    kind prefix ("glob:", "re:", ...) are never expanded.
    '''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, bare = matchmod._patsplit(kindpat, None)
        matches = None
        if kind is None:
            try:
                matches = glob.glob(bare)
            except re.error:
                # an unparsable pattern is kept verbatim
                matches = [bare]
        if matches:
            expanded.extend(matches)
        else:
            # kinded pattern, or a glob that matched nothing
            expanded.append(kindpat)
    return expanded
531
531
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.

    The matcher warns about bad matches on the repository ui unless an
    alternate ``badfn`` callback is provided.
    '''
    if opts is None:
        opts = {}
    if pats == ("",):
        pats = []
    if default == 'relpath' and not globbed:
        pats = expandpats(pats or [])

    if badfn is None:
        # default: report bad files as ui warnings ('m' is late-bound)
        def badfn(f, msg):
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    return (m, [] if m.always() else pats)
556
556
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _discarded = matchandpats(ctx, pats, opts, globbed, default,
                                 badfn=badfn)
    return m
561
561
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
565
565
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
569
569
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path of the backup file.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a file sitting where one of its parent directories must go)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot):
        # a directory sits where the backup file must go: wipe it
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
605
605
606 class _containsnode(object):
606 class _containsnode(object):
607 """proxy __contains__(node) to container.__contains__ which accepts revs"""
607 """proxy __contains__(node) to container.__contains__ which accepts revs"""
608
608
609 def __init__(self, repo, revcontainer):
609 def __init__(self, repo, revcontainer):
610 self._torev = repo.changelog.rev
610 self._torev = repo.changelog.rev
611 self._revcontains = revcontainer.__contains__
611 self._revcontains = revcontainer.__contains__
612
612
613 def __contains__(self, node):
613 def __contains__(self, node):
614 return self._revcontains(self._torev(node))
614 return self._revcontains(self._torev(node))
615
615
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            # plain 1:1 replacement
            newnode = newnodes[0]
        moves[oldnode] = newnode

    # apply bookmark moves and obsmarkers/strip within one transaction
    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
704
704
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    Walks the dirstate with ``matcher``, prints what is being
    added/removed, records detected renames as copies, and (unless
    ``dry_run``) marks the changes in the working context.  Recurses
    into subrepositories when requested.  Returns 1 if an explicitly
    listed file was rejected or a subrepo's addremove returned nonzero,
    else 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when --subrepos is given, the subrepo is named
        # explicitly, or the match carries files inside the subrepo
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn about explicitly listed files; remember every reject
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # tell the user what will be added (unknown/forgotten) and removed
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            # an explicitly named path could not be handled
            return 1
    return ret
760
760
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    m = matchfiles(repo, files, badfn=lambda f, msg: rejected.append(f))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown) | set(forgotten)
        for abs in sorted(unknownset | set(deleted)):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    # report failure when any bad file was explicitly requested
    return 1 if any(f in m.files() for f in rejected) else 0
789
789
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    auditor = pathutil.pathauditor(repo.root, cached=True)
    wctx = repo[None]
    ds = repo.dirstate
    walkresults = ds.walk(matcher, subrepos=sorted(wctx.substate),
                          unknown=True, ignored=False, full=False)

    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    for path, st in walkresults.iteritems():
        state = ds[path]
        if state == '?' and auditor.check(path):
            unknown.append(path)
        elif state != 'r' and not st:
            deleted.append(path)
        elif state == 'r':
            # tracked-removed: distinguish still-present (forgotten)
            # from really gone (removed, later used for rename finding)
            if st:
                forgotten.append(path)
            else:
                removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed, forgotten
818
818
def _findrenames(repo, matcher, added, removed, similarity):
    '''Detect renames of removed files into added ones.

    Returns a {new: old} mapping for pairs whose similarity score
    reaches the threshold.  Does nothing when similarity is zero.
    '''
    if similarity <= 0:
        return {}
    renames = {}
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        exactboth = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not exactboth:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
833
833
def _markchanges(repo, unknown, deleted, renames):
    '''Record pending working-directory changes in the dirstate.

    Files in ``unknown`` are scheduled for addition, files in
    ``deleted`` are forgotten, and every (new, old) pair in ``renames``
    is recorded as a copy.
    '''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
843
843
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow the copy chain back to the original source, if any
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is not in modified/normal state: restore it to normal
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed: no copy data
            # can be recorded, so just add dst (with a warning)
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
862
862
def readrequires(opener, supported):
    '''Read .hg/requires and verify each entry against ``supported``.

    Returns the set of requirements on success.  Raises
    RequirementError for a corrupt file or for any requirement this
    Mercurial does not know about.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # entries must start with an alphanumeric character
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
881
881
def writerequires(opener, requirements):
    '''Write the requirements via ``opener``, one per line and sorted.'''
    with opener('requires', 'w') as fp:
        lines = ['%s\n' % r for r in sorted(requirements)]
        for line in lines:
            fp.write(line)
886
886
class filecachesubentry(object):
    """Track the stat state of a single file.

    Used by filecache to decide whether a cached value must be
    recomputed because the backing file changed on disk.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat the file, but only when stat data is usable for caching
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True when the file changed (or cannot be cached at all)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
941
941
class filecacheentry(object):
    """Aggregate cache state for a group of files.

    Wraps one filecachesubentry per path and reports a change as soon
    as any of them changes.
    """
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
958
958
959 class filecache(object):
959 class filecache(object):
960 '''A property like decorator that tracks files under .hg/ for updates.
960 '''A property like decorator that tracks files under .hg/ for updates.
961
961
962 Records stat info when called in _filecache.
962 Records stat info when called in _filecache.
963
963
964 On subsequent calls, compares old stat info with new info, and recreates the
964 On subsequent calls, compares old stat info with new info, and recreates the
965 object when any of the files changes, updating the new stat info in
965 object when any of the files changes, updating the new stat info in
966 _filecache.
966 _filecache.
967
967
968 Mercurial either atomic renames or appends for files under .hg,
968 Mercurial either atomic renames or appends for files under .hg,
969 so to ensure the cache is reliable we need the filesystem to be able
969 so to ensure the cache is reliable we need the filesystem to be able
970 to tell us if a file has been replaced. If it can't, we fallback to
970 to tell us if a file has been replaced. If it can't, we fallback to
971 recreating the object on every call (essentially the same behavior as
971 recreating the object on every call (essentially the same behavior as
972 propertycache).
972 propertycache).
973
973
974 '''
974 '''
    def __init__(self, *paths):
        # paths: file names whose changes invalidate the cached value;
        # resolved to real locations at access time via join()
        self.paths = paths
977
977
    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).

        Raises NotImplementedError when not overridden.
        """
        raise NotImplementedError
986
986
    def __call__(self, func):
        """Decorator protocol: record the wrapped function and return self."""
        self.func = func
        # cache key: the decorated function's name, as bytes
        self.name = func.__name__.encode('ascii')
        return self
991
991
    def __get__(self, obj, type=None):
        """Return the cached value, recomputing it if a tracked file changed."""
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # underlying file changed: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj
1018
1018
1019 def __set__(self, obj, value):
1019 def __set__(self, obj, value):
1020 if self.name not in obj._filecache:
1020 if self.name not in obj._filecache:
1021 # we add an entry for the missing value because X in __dict__
1021 # we add an entry for the missing value because X in __dict__
1022 # implies X in _filecache
1022 # implies X in _filecache
1023 paths = [self.join(obj, path) for path in self.paths]
1023 paths = [self.join(obj, path) for path in self.paths]
1024 ce = filecacheentry(paths, False)
1024 ce = filecacheentry(paths, False)
1025 obj._filecache[self.name] = ce
1025 obj._filecache[self.name] = ce
1026 else:
1026 else:
1027 ce = obj._filecache[self.name]
1027 ce = obj._filecache[self.name]
1028
1028
1029 ce.obj = value # update cached copy
1029 ce.obj = value # update cached copy
1030 obj.__dict__[self.name] = value # update copy returned by obj.x
1030 obj.__dict__[self.name] = value # update copy returned by obj.x
1031
1031
1032 def __delete__(self, obj):
1032 def __delete__(self, obj):
1033 try:
1033 try:
1034 del obj.__dict__[self.name]
1034 del obj.__dict__[self.name]
1035 except KeyError:
1035 except KeyError:
1036 raise AttributeError(self.name)
1036 raise AttributeError(self.name)
1037
1037
1038 def extdatasource(repo, source):
1038 def extdatasource(repo, source):
1039 """Gather a map of rev -> value dict from the specified source
1039 """Gather a map of rev -> value dict from the specified source
1040
1040
1041 A source spec is treated as a URL, with a special case shell: type
1041 A source spec is treated as a URL, with a special case shell: type
1042 for parsing the output from a shell command.
1042 for parsing the output from a shell command.
1043
1043
1044 The data is parsed as a series of newline-separated records where
1044 The data is parsed as a series of newline-separated records where
1045 each record is a revision specifier optionally followed by a space
1045 each record is a revision specifier optionally followed by a space
1046 and a freeform string value. If the revision is known locally, it
1046 and a freeform string value. If the revision is known locally, it
1047 is converted to a rev, otherwise the record is skipped.
1047 is converted to a rev, otherwise the record is skipped.
1048
1048
1049 Note that both key and value are treated as UTF-8 and converted to
1049 Note that both key and value are treated as UTF-8 and converted to
1050 the local encoding. This allows uniformity between local and
1050 the local encoding. This allows uniformity between local and
1051 remote data sources.
1051 remote data sources.
1052 """
1052 """
1053
1053
1054 spec = repo.ui.config("extdata", source)
1054 spec = repo.ui.config("extdata", source)
1055 if not spec:
1055 if not spec:
1056 raise error.Abort(_("unknown extdata source '%s'") % source)
1056 raise error.Abort(_("unknown extdata source '%s'") % source)
1057
1057
1058 data = {}
1058 data = {}
1059 src = proc = None
1059 src = proc = None
1060 try:
1060 try:
1061 if spec.startswith("shell:"):
1061 if spec.startswith("shell:"):
1062 # external commands should be run relative to the repo root
1062 # external commands should be run relative to the repo root
1063 cmd = spec[6:]
1063 cmd = spec[6:]
1064 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1064 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1065 close_fds=util.closefds,
1065 close_fds=util.closefds,
1066 stdout=subprocess.PIPE, cwd=repo.root)
1066 stdout=subprocess.PIPE, cwd=repo.root)
1067 src = proc.stdout
1067 src = proc.stdout
1068 else:
1068 else:
1069 # treat as a URL or file
1069 # treat as a URL or file
1070 src = url.open(repo.ui, spec)
1070 src = url.open(repo.ui, spec)
1071 for l in src:
1071 for l in src:
1072 if " " in l:
1072 if " " in l:
1073 k, v = l.strip().split(" ", 1)
1073 k, v = l.strip().split(" ", 1)
1074 else:
1074 else:
1075 k, v = l.strip(), ""
1075 k, v = l.strip(), ""
1076
1076
1077 k = encoding.tolocal(k)
1077 k = encoding.tolocal(k)
1078 try:
1078 try:
1079 data[repo[k].rev()] = encoding.tolocal(v)
1079 data[repo[k].rev()] = encoding.tolocal(v)
1080 except (error.LookupError, error.RepoLookupError):
1080 except (error.LookupError, error.RepoLookupError):
1081 pass # we ignore data for nodes that don't exist locally
1081 pass # we ignore data for nodes that don't exist locally
1082 finally:
1082 finally:
1083 if proc:
1083 if proc:
1084 proc.communicate()
1084 proc.communicate()
1085 if proc.returncode != 0:
1085 if proc.returncode != 0:
1086 # not an error so 'cmd | grep' can be empty
1086 # not an error so 'cmd | grep' can be empty
1087 repo.ui.debug("extdata command '%s' %s\n"
1087 repo.ui.debug("extdata command '%s' %s\n"
1088 % (cmd, util.explainexit(proc.returncode)[0]))
1088 % (cmd, util.explainexit(proc.returncode)[0]))
1089 if src:
1089 if src:
1090 src.close()
1090 src.close()
1091
1091
1092 return data
1092 return data
1093
1093
1094 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1094 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1095 if lock is None:
1095 if lock is None:
1096 raise error.LockInheritanceContractViolation(
1096 raise error.LockInheritanceContractViolation(
1097 'lock can only be inherited while held')
1097 'lock can only be inherited while held')
1098 if environ is None:
1098 if environ is None:
1099 environ = {}
1099 environ = {}
1100 with lock.inherit() as locker:
1100 with lock.inherit() as locker:
1101 environ[envvar] = locker
1101 environ[envvar] = locker
1102 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1102 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1103
1103
1104 def wlocksub(repo, cmd, *args, **kwargs):
1104 def wlocksub(repo, cmd, *args, **kwargs):
1105 """run cmd as a subprocess that allows inheriting repo's wlock
1105 """run cmd as a subprocess that allows inheriting repo's wlock
1106
1106
1107 This can only be called while the wlock is held. This takes all the
1107 This can only be called while the wlock is held. This takes all the
1108 arguments that ui.system does, and returns the exit code of the
1108 arguments that ui.system does, and returns the exit code of the
1109 subprocess."""
1109 subprocess."""
1110 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1110 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1111 **kwargs)
1111 **kwargs)
1112
1112
1113 def gdinitconfig(ui):
1113 def gdinitconfig(ui):
1114 """helper function to know if a repo should be created as general delta
1114 """helper function to know if a repo should be created as general delta
1115 """
1115 """
1116 # experimental config: format.generaldelta
1116 # experimental config: format.generaldelta
1117 return (ui.configbool('format', 'generaldelta')
1117 return (ui.configbool('format', 'generaldelta')
1118 or ui.configbool('format', 'usegeneraldelta'))
1118 or ui.configbool('format', 'usegeneraldelta'))
1119
1119
1120 def gddeltaconfig(ui):
1120 def gddeltaconfig(ui):
1121 """helper function to know if incoming delta should be optimised
1121 """helper function to know if incoming delta should be optimised
1122 """
1122 """
1123 # experimental config: format.generaldelta
1123 # experimental config: format.generaldelta
1124 return ui.configbool('format', 'generaldelta')
1124 return ui.configbool('format', 'generaldelta')
1125
1125
1126 class simplekeyvaluefile(object):
1126 class simplekeyvaluefile(object):
1127 """A simple file with key=value lines
1127 """A simple file with key=value lines
1128
1128
1129 Keys must be alphanumerics and start with a letter, values must not
1129 Keys must be alphanumerics and start with a letter, values must not
1130 contain '\n' characters"""
1130 contain '\n' characters"""
1131 firstlinekey = '__firstline'
1131 firstlinekey = '__firstline'
1132
1132
1133 def __init__(self, vfs, path, keys=None):
1133 def __init__(self, vfs, path, keys=None):
1134 self.vfs = vfs
1134 self.vfs = vfs
1135 self.path = path
1135 self.path = path
1136
1136
1137 def read(self, firstlinenonkeyval=False):
1137 def read(self, firstlinenonkeyval=False):
1138 """Read the contents of a simple key-value file
1138 """Read the contents of a simple key-value file
1139
1139
1140 'firstlinenonkeyval' indicates whether the first line of file should
1140 'firstlinenonkeyval' indicates whether the first line of file should
1141 be treated as a key-value pair or reuturned fully under the
1141 be treated as a key-value pair or reuturned fully under the
1142 __firstline key."""
1142 __firstline key."""
1143 lines = self.vfs.readlines(self.path)
1143 lines = self.vfs.readlines(self.path)
1144 d = {}
1144 d = {}
1145 if firstlinenonkeyval:
1145 if firstlinenonkeyval:
1146 if not lines:
1146 if not lines:
1147 e = _("empty simplekeyvalue file")
1147 e = _("empty simplekeyvalue file")
1148 raise error.CorruptedState(e)
1148 raise error.CorruptedState(e)
1149 # we don't want to include '\n' in the __firstline
1149 # we don't want to include '\n' in the __firstline
1150 d[self.firstlinekey] = lines[0][:-1]
1150 d[self.firstlinekey] = lines[0][:-1]
1151 del lines[0]
1151 del lines[0]
1152
1152
1153 try:
1153 try:
1154 # the 'if line.strip()' part prevents us from failing on empty
1154 # the 'if line.strip()' part prevents us from failing on empty
1155 # lines which only contain '\n' therefore are not skipped
1155 # lines which only contain '\n' therefore are not skipped
1156 # by 'if line'
1156 # by 'if line'
1157 updatedict = dict(line[:-1].split('=', 1) for line in lines
1157 updatedict = dict(line[:-1].split('=', 1) for line in lines
1158 if line.strip())
1158 if line.strip())
1159 if self.firstlinekey in updatedict:
1159 if self.firstlinekey in updatedict:
1160 e = _("%r can't be used as a key")
1160 e = _("%r can't be used as a key")
1161 raise error.CorruptedState(e % self.firstlinekey)
1161 raise error.CorruptedState(e % self.firstlinekey)
1162 d.update(updatedict)
1162 d.update(updatedict)
1163 except ValueError as e:
1163 except ValueError as e:
1164 raise error.CorruptedState(str(e))
1164 raise error.CorruptedState(str(e))
1165 return d
1165 return d
1166
1166
1167 def write(self, data, firstline=None):
1167 def write(self, data, firstline=None):
1168 """Write key=>value mapping to a file
1168 """Write key=>value mapping to a file
1169 data is a dict. Keys must be alphanumerical and start with a letter.
1169 data is a dict. Keys must be alphanumerical and start with a letter.
1170 Values must not contain newline characters.
1170 Values must not contain newline characters.
1171
1171
1172 If 'firstline' is not None, it is written to file before
1172 If 'firstline' is not None, it is written to file before
1173 everything else, as it is, not in a key=value form"""
1173 everything else, as it is, not in a key=value form"""
1174 lines = []
1174 lines = []
1175 if firstline is not None:
1175 if firstline is not None:
1176 lines.append('%s\n' % firstline)
1176 lines.append('%s\n' % firstline)
1177
1177
1178 for k, v in data.items():
1178 for k, v in data.items():
1179 if k == self.firstlinekey:
1179 if k == self.firstlinekey:
1180 e = "key name '%s' is reserved" % self.firstlinekey
1180 e = "key name '%s' is reserved" % self.firstlinekey
1181 raise error.ProgrammingError(e)
1181 raise error.ProgrammingError(e)
1182 if not k[0].isalpha():
1182 if not k[0].isalpha():
1183 e = "keys must start with a letter in a key-value file"
1183 e = "keys must start with a letter in a key-value file"
1184 raise error.ProgrammingError(e)
1184 raise error.ProgrammingError(e)
1185 if not k.isalnum():
1185 if not k.isalnum():
1186 e = "invalid key name in a simple key-value file"
1186 e = "invalid key name in a simple key-value file"
1187 raise error.ProgrammingError(e)
1187 raise error.ProgrammingError(e)
1188 if '\n' in v:
1188 if '\n' in v:
1189 e = "invalid value in a simple key-value file"
1189 e = "invalid value in a simple key-value file"
1190 raise error.ProgrammingError(e)
1190 raise error.ProgrammingError(e)
1191 lines.append("%s=%s\n" % (k, v))
1191 lines.append("%s=%s\n" % (k, v))
1192 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1192 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1193 fp.write(''.join(lines))
1193 fp.write(''.join(lines))
1194
1194
1195 _reportobsoletedsource = [
1195 _reportobsoletedsource = [
1196 'debugobsolete',
1196 'debugobsolete',
1197 'pull',
1197 'pull',
1198 'push',
1198 'push',
1199 'serve',
1199 'serve',
1200 'unbundle',
1200 'unbundle',
1201 ]
1201 ]
1202
1202
1203 def registersummarycallback(repo, otr, txnname=''):
1203 def registersummarycallback(repo, otr, txnname=''):
1204 """register a callback to issue a summary after the transaction is closed
1204 """register a callback to issue a summary after the transaction is closed
1205 """
1205 """
1206 def txmatch(sources):
1206 def txmatch(sources):
1207 return any(txnname.startswith(source) for source in sources)
1207 return any(txnname.startswith(source) for source in sources)
1208
1208
1209 if txmatch(_reportobsoletedsource):
1209 categories = []
1210
1211 def reportsummary(func):
1212 """decorator for report callbacks."""
1210 reporef = weakref.ref(repo)
1213 reporef = weakref.ref(repo)
1211 def reportsummary(tr):
1214 def wrapped(tr):
1212 """the actual callback reporting the summary"""
1213 repo = reporef()
1215 repo = reporef()
1216 func(repo, tr)
1217 newcat = '%2i-txnreport' % len(categories)
1218 otr.addpostclose(newcat, wrapped)
1219 categories.append(newcat)
1220 return wrapped
1221
1222 if txmatch(_reportobsoletedsource):
1223 @reportsummary
1224 def reportobsoleted(repo, tr):
1214 obsoleted = obsutil.getobsoleted(repo, tr)
1225 obsoleted = obsutil.getobsoleted(repo, tr)
1215 if obsoleted:
1226 if obsoleted:
1216 repo.ui.status(_('obsoleted %i changesets\n')
1227 repo.ui.status(_('obsoleted %i changesets\n')
1217 % len(obsoleted))
1228 % len(obsoleted))
1218 otr.addpostclose('00-txnreport', reportsummary)
General Comments 0
You need to be logged in to leave comments. Login now