##// END OF EJS Templates
# Changeset header (web-view residue, kept as a comment):
# cleanupnode: do not use generator for node mapping — Octobus
# r33352:967ac37f default
# diff hunk: @@ -1,1093 +1,1093
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 wdirid,
22 wdirid,
23 wdirrev,
23 wdirrev,
24 )
24 )
25
25
26 from .i18n import _
26 from .i18n import _
27 from . import (
27 from . import (
28 encoding,
28 encoding,
29 error,
29 error,
30 match as matchmod,
30 match as matchmod,
31 obsolete,
31 obsolete,
32 obsutil,
32 obsutil,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 revsetlang,
36 revsetlang,
37 similar,
37 similar,
38 util,
38 util,
39 )
39 )
40
40
# Select the platform-specific implementation module once at import time;
# the rest of this module only uses the shared scmplatform interface.
if pycompat.osname == 'nt':
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

# Re-exported for callers that want the terminal size helper directly.
termsize = scmplatform.termsize
47
47
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    # No per-instance __dict__: instances are plain 7-tuples.
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
100
100
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # items() instead of iteritems(): sorted() materializes the pairs
    # anyway, and this also works on Python 3.
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
125
125
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            # Only count live secret changesets; extinct ones are not
            # interesting to the user.
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
142
142
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe: the reader went away; nothing useful to report.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
250
250
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not acceptable as a new label (bookmark/branch/tag).

    Returns None when the label is acceptable.
    """
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        # Purely-numeric names would be ambiguous with revision numbers.
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
264
264
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt the manifest encoding.
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
269
269
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
281
281
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # On Windows non-portable names are always fatal.
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
294
294
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds to an existing one."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case all tracked names in one pass; joining on NUL lets
        # encoding.lower() run once over the whole set.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
318
318
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # b'%d;': sha1.update() requires bytes on Python 3; this is
            # byte-identical to the former str formatting on Python 2.
            s.update(b'%d;' % rev)
        key = s.digest()
    return key
342
342
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only propagate errors for the root itself; ignore unreadable
        # subdirectories.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat; return True only the first time we
            # see this directory (guards against symlink cycles).
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # samestat unavailable (e.g. some platforms): cannot safely
        # follow symlinks.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
390
390
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        # Working-directory context: substitute the sentinel node id.
        return wdirid
    return node
397
397
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        # Working-directory context: substitute the sentinel revision.
        return wdirrev
    return rev
405
405
def revsingle(repo, revspec, default='.'):
    """Resolve ``revspec`` to a single changectx, or ``repo[default]``.

    Raises Abort when the revset resolves to an empty set.
    """
    # Explicitly allow revision 0 (falsy) as a valid spec.
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec])
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]
414
414
def _pairspec(revspec):
    """Return True if ``revspec`` parses to a top-level range expression."""
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
418
418
def revpair(repo, revs):
    """Resolve ``revs`` to a (node, node-or-None) pair.

    A None second element means the caller should use the single
    revision's natural counterpart (e.g. its parent).
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # Pick endpoints without materializing the smartset when its order
    # is known.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
448
448
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            # Bare revision numbers become explicit rev() expressions.
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True)
476
476
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        # In debug mode always show both parents, padding with null.
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # Immediately-preceding parent carries no extra information.
        return []
    return parents
492
492
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        # Only expand patterns with no explicit kind prefix ("glob:" etc.).
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # Keep the original pattern when it has a kind or matched nothing.
        ret.append(kindpat)
    return ret
511
511
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # A single empty pattern means "no patterns".
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # Matcher matches everything; report no explicit patterns.
        pats = []
    return m, pats
536
536
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # delegate to matchandpats and discard the normalized pattern list
    m, _discarded = matchandpats(ctx, pats, opts, globbed, default,
                                 badfn=badfn)
    return m
541
541
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
545
545
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
549
549
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    backuproot = ui.config('ui', 'origbackuppath', None)
    if backuproot is None:
        # no override configured: keep the backup next to the file
        return filepath + ".orig"

    relfilepath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backuproot, relfilepath)

    # make sure the destination directory exists before handing the path back
    backupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullorigpath + ".orig"
569
569
570 class _containsnode(object):
570 class _containsnode(object):
571 """proxy __contains__(node) to container.__contains__ which accepts revs"""
571 """proxy __contains__(node) to container.__contains__ which accepts revs"""
572
572
573 def __init__(self, repo, revcontainer):
573 def __init__(self, repo, revcontainer):
574 self._torev = repo.changelog.rev
574 self._torev = repo.changelog.rev
575 self._revcontains = revcontainer.__contains__
575 self._revcontains = revcontainer.__contains__
576
576
577 def __contains__(self, node):
577 def __contains__(self, node):
578 return self._revcontains(self._torev(node))
578 return self._revcontains(self._torev(node))
579
579
def cleanupnodes(repo, mapping, operation):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
    replacements. operation is a string, like "rebase".
    """
    # normalize a plain iterable of nodes into {node: no-successors}
    if not util.safehasattr(mapping, 'items'):
        mapping = {n: () for n in mapping}

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanged = False
        allnewnodes = [n for ns in mapping.values() for n in ns]
        for oldnode, newnodes in mapping.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            bmarkchanged = True
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(repo.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                roots = list(repo.set('max((::%n) - %ln)', oldnode,
                                      list(mapping)))
                if roots:
                    newnode = roots[0].node()
                else:
                    # no surviving ancestor: park the bookmark on null
                    newnode = nullid
            else:
                newnode = newnodes[0]
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarks[name] = newnode
                bookmarks.deletedivergent(repo, deletenodes, name)
        if bmarkchanged:
            bmarks.recordchange(tr)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            # Unfiltered repo is needed since nodes in mapping might be hidden.
            unfi = repo.unfiltered()
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            # build concrete tuples (not generators) so the successor sets
            # survive being iterated more than once downstream
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(mapping.items(), key=sortfunc)
                    if s or not isobs(n)]
            obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            repair.delayedstrip(repo.ui, repo, list(mapping), operation)
648
648
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    Recurses into subrepositories when requested (or when the matcher
    targets them), reports the planned adds/removes, detects renames by
    content similarity, and records the changes unless dry_run is set.
    Returns 1 when any explicitly-requested file was rejected or a
    subrepository reported a failure, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when --subrepos was given, the subrepo itself was named,
        # or the matcher has files inside the subrepo
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn for files the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
704
704
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # echo the planned adds and removals, like addremove does
        unknownset = set(unknown + forgotten)
        toprint = unknownset | set(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file that was explicitly requested is a failure
    if any(f in m.files() for f in rejected):
        return 1
    return 0
733
733
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # 'st' is the walk's stat result: falsy means the file is gone from disk.
    # dirstate letters: '?' untracked, 'r' removed, 'a' added.
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and a legitimate path inside the repo
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing from disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed yet present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
762
762
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        # rename detection disabled
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # stay quiet only when both endpoints were named explicitly
        # (and we are not in verbose mode)
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
777
777
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all three dirstate mutations happen under one working-copy lock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
787
787
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow one level of copy history: a copy of a copy points at the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'm'/'n' are already-tracked states; anything else needs a relookup
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only added, never committed: no copy data to record
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # '?' untracked / 'r' removed: make sure dst is at least added
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
806
806
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for entry in requirements:
        if entry in supported:
            continue
        if not entry or not entry[0].isalnum():
            # a requirement must start with an alphanumeric character;
            # anything else means the file itself is damaged
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(entry)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
825
825
def writerequires(opener, requirements):
    """Persist 'requirements' through 'opener', one entry per line, sorted."""
    with opener('requires', 'w') as fp:
        for name in sorted(requirements):
            fp.write(name + "\n")
830
830
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    Used by filecacheentry/filecache to decide when a cached object built
    from this file must be rebuilt.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # record the current stat info as the new baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True when the file may have changed since the last stat."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None for a missing file; re-raises any other OS error
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
885
885
class filecacheentry(object):
    """Aggregate change tracker over several paths, one subentry per path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
902
902
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # used as a decorator: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # backing file changed: rebuild the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
981
981
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run 'cmd' via ui.system with 'envvar' set to the inherited lock token.

    'lock' must currently be held; raises LockInheritanceContractViolation
    otherwise. NOTE: a caller-supplied 'environ' dict is mutated in place
    (envvar is written into it).
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
991
991
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1000
1000
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1007
1007
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1013
1013
class simplekeyvaluefile(object):
    """A plain-text file holding one key=value pair per line.

    Keys must be alphanumeric and begin with a letter; values must not
    contain newline characters."""

    # sentinel key under which a verbatim first line is stored/reserved
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but is unused
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file and return its contents as a dict.

        When 'firstlinenonkeyval' is true, the first line is not parsed
        as a key-value pair but stored verbatim (minus its trailing
        newline) under the '__firstline' key."""
        lines = self.vfs.readlines(self.path)
        mapping = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # drop the trailing '\n' before storing the first line
            mapping[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]
        try:
            # whitespace-only lines are skipped via strip(); a bare
            # truthiness test would not skip lines containing only '\n'
            pairs = [line[:-1].split('=', 1) for line in lines
                     if line.strip()]
            parsed = dict(pairs)
            if self.firstlinekey in parsed:
                msg = _("%r can't be used as a key") % self.firstlinekey
                raise error.CorruptedState(msg)
            mapping.update(parsed)
        except ValueError as exc:
            raise error.CorruptedState(str(exc))
        return mapping

    def write(self, data, firstline=None):
        """Serialize a dict to the file as key=value lines.

        'data' maps keys to values; keys must be alphanumeric and start
        with a letter, values must not contain newline characters.

        When 'firstline' is not None it is written before everything
        else, verbatim (not in key=value form)."""
        out = []
        if firstline is not None:
            out.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1082
1082
def registersummarycallback(repo, otr):
    """register a callback to issue a summary after the transaction is closed
    """
    # keep only a weak reference so the registered callback does not
    # prolong the repository object's lifetime
    reporef = weakref.ref(repo)
    def reportsummary(tr):
        """the actual callback reporting the summary"""
        target = reporef()
        obsoleted = obsutil.getobsoleted(target, tr)
        count = len(obsoleted)
        if count:
            target.ui.status(_('obsoleted %i changesets\n') % count)
    otr.addpostclose('00-txnreport', reportsummary)
General Comments 0
You need to be logged in to leave comments. Login now