##// END OF EJS Templates
cleanupnodes: rename "mapping" to "replacements"...
Martin von Zweigbergk -
r34363:2dbd6d25 stable
parent child Browse files
Show More
@@ -1,1110 +1,1110 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 wdirid,
22 wdirid,
23 wdirrev,
23 wdirrev,
24 )
24 )
25
25
26 from . import (
26 from . import (
27 encoding,
27 encoding,
28 error,
28 error,
29 match as matchmod,
29 match as matchmod,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 pathutil,
32 pathutil,
33 phases,
33 phases,
34 pycompat,
34 pycompat,
35 revsetlang,
35 revsetlang,
36 similar,
36 similar,
37 util,
37 util,
38 )
38 )
39
39
40 if pycompat.osname == 'nt':
40 if pycompat.osname == 'nt':
41 from . import scmwindows as scmplatform
41 from . import scmwindows as scmplatform
42 else:
42 else:
43 from . import scmposix as scmplatform
43 from . import scmposix as scmplatform
44
44
45 termsize = scmplatform.termsize
45 termsize = scmplatform.termsize
46
46
class status(tuple):
    '''Named tuple with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    '''

    # no per-instance __dict__; all state lives in the tuple slots
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
99
99
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
124
124
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            # only report live secret changesets as "ignored"
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
141
141
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # intervention required is not an error; use a distinct exit code
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError duck type
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError duck type
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe is an expected way for a pager etc. to go away
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
249
249
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not usable as a new label (bookmark/branch/tag) name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        # not an integer: acceptable
        pass
263
263
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
268
268
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
280
280
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns a (abort, warn) pair of booleans.
    '''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    # Windows always aborts on non-portable names
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
293
293
class casecollisionauditor(object):
    '''Warn or abort when a new file would case-fold-collide with a
    tracked file.'''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case every tracked filename in one encoding.lower() call
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
317
317
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
341
341
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate errors for the root path itself
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and remember dirname) if it was not seen before;
            # comparison is by stat identity, which catches symlink loops
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # platform cannot detect symlink loops: don't follow symlinks
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
389
389
def binnode(ctx):
    """Return binary node id for a given basectx

    The working directory context has no node; map it to wdirid.
    """
    node = ctx.node()
    if node is None:
        return wdirid
    return node
396
396
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation

    The working directory context has no revision number; map it to wdirrev.
    """
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev
404
404
def revsingle(repo, revspec, default='.'):
    '''Resolve revspec to a single changectx, or default when empty.

    An explicit revision 0 is honored even though it is falsy.
    '''
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec])
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]
413
413
def _pairspec(revspec):
    '''True if revspec is a top-level range expression (e.g. "a::b").'''
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
417
417
def revpair(repo, revs):
    '''Resolve revs to a (first, second) pair of binary nodes.

    second is None when the specs collapse to a single revision (unless
    the spec was an explicit range expression).
    '''
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
447
447
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True)
475
475
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # linear history: the parent is implied, nothing to show
        return []
    return parents
491
491
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid pattern for glob: fall through to the literal name
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # keep the original (kinded or non-matching) pattern as-is
        ret.append(kindpat)
    return ret
510
510
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is bound below; the closure is only invoked after that
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
535
535
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # convenience wrapper around matchandpats that drops the patterns
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
540
540
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())
544
544
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
548
548
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if origbackuppath is None:
        # default behavior: backup next to the original file
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured backup dir
    filepathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)

    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)

    return fullorigpath + ".orig"
568
568
569 class _containsnode(object):
569 class _containsnode(object):
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
571
571
572 def __init__(self, repo, revcontainer):
572 def __init__(self, repo, revcontainer):
573 self._torev = repo.changelog.rev
573 self._torev = repo.changelog.rev
574 self._revcontains = revcontainer.__contains__
574 self._revcontains = revcontainer.__contains__
575
575
576 def __contains__(self, node):
576 def __contains__(self, node):
577 return self._revcontains(self._torev(node))
577 return self._revcontains(self._torev(node))
578
578
def cleanupnodes(repo, replacements, operation):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not util.safehasattr(replacements, 'items'):
        # normalize a plain iterable of nodes into {node: no-successors}
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            repair.delayedstrip(repo.ui, repo, list(replacements), operation)
653
653
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    Recurses into subrepos when requested, detects renames above the given
    similarity threshold, and returns 1 if any explicitly named file was
    rejected, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                # subrepo metadata exists but the subrepo itself is missing
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # explicitly named files that were rejected take precedence over ret
    for f in rejected:
        if f in m.files():
            return 1
    return ret
709
709
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # 'rejected' is referenced lazily by the badfn lambda, so binding it on
    # the next line is safe.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
738
738
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # dirstate codes: '?' unknown, 'r' removed, 'a' added; st is the stat
    # result (falsy when the file is missing on disk)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
767
767
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping for pairs scoring at least 'similarity'
    (a value in (0, 1]); an empty dict when similarity is 0 or negative.
    '''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
782
782
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # take the wlock so dirstate mutations are atomic w.r.t. other writers
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
792
792
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # copying an uncommitted add: warn that no copy data is recorded
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
811
811
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements; raises error.RequirementError when an
    entry is malformed or unsupported.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            # a non-alphanumeric first character indicates file corruption
            if not r or not r[0].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
830
830
def writerequires(opener, requirements):
    """Write the given requirements, sorted one per line, to .hg/requires."""
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)
835
835
class filecachesubentry(object):
    """Track the stat info of a single file path for cache invalidation.

    _cacheable is a tri-state: True/False once known, None until the file
    has been stat'ed successfully at least once.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat only if the filesystem can give us reliable answers
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
890
890
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        # re-stat every tracked path
        for entry in self._entries:
            entry.refresh()
907
907
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths, resolved per-instance through join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
986
986
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd as a subprocess that can inherit the given held lock.

    The lock token is exported via 'envvar' in the child's environment.
    Raises error.LockInheritanceContractViolation if the lock is not held.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
996
996
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
1005
1005
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    # either the explicit or the "use" flag turns generaldelta on
    return any(ui.configbool('format', key)
               for key in ('generaldelta', 'usegeneraldelta'))
1012
1012
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1018
1018
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # vfs: file-system abstraction used for all reads/writes
        self.vfs = vfs
        # path: file location, relative to vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        contents = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not contents:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # store the first line verbatim, minus its trailing '\n'
            result[self.firstlinekey] = contents[0][:-1]
            contents = contents[1:]

        try:
            # skip blank lines; 'line.strip()' (rather than just 'line')
            # also filters lines containing only '\n'
            parsed = dict(line[:-1].split('=', 1)
                          for line in contents if line.strip())
            if self.firstlinekey in parsed:
                msg = _("%r can't be used as a key")
                raise error.CorruptedState(msg % self.firstlinekey)
            result.update(parsed)
        except ValueError as inst:
            # a malformed line (no '=') surfaces here from dict()
            raise error.CorruptedState(str(inst))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = []
        if firstline is not None:
            out.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1087
1087
# Transaction-name prefixes for which a summary of obsoleted changesets
# is reported when the transaction closes (consumed by
# registersummarycallback() below, which matches with startswith()).
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]
1095
1095
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    # only transactions whose name matches a known source get a report;
    # at most one callback is registered either way
    if not any(txnname.startswith(prefix)
               for prefix in _reportobsoletedsource):
        return
    # hold the repo weakly so the callback does not extend its lifetime
    reporef = weakref.ref(repo)
    def reportsummary(tr):
        """the actual callback reporting the summary"""
        # NOTE(review): assumes the repo is still alive when the
        # transaction closes; reporef() would return None otherwise
        repo = reporef()
        obsoleted = obsutil.getobsoleted(repo, tr)
        if obsoleted:
            repo.ui.status(_('obsoleted %i changesets\n')
                           % len(obsoleted))
    otr.addpostclose('00-txnreport', reportsummary)
General Comments 0
You need to be logged in to leave comments. Login now