##// END OF EJS Templates
scmutil: use resolvehexnodeidprefix() from revsymbol()...
Martin von Zweigbergk -
r37697:ab828755 default
parent child Browse files
Show More
@@ -1,1548 +1,1548 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 revsetlang,
38 revsetlang,
39 similar,
39 similar,
40 url,
40 url,
41 util,
41 util,
42 vfs,
42 vfs,
43 )
43 )
44
44
45 from .utils import (
45 from .utils import (
46 procutil,
46 procutil,
47 stringutil,
47 stringutil,
48 )
48 )
49
49
50 if pycompat.iswindows:
50 if pycompat.iswindows:
51 from . import scmwindows as scmplatform
51 from . import scmwindows as scmplatform
52 else:
52 else:
53 from . import scmposix as scmplatform
53 from . import scmposix as scmplatform
54
54
55 termsize = scmplatform.termsize
55 termsize = scmplatform.termsize
56
56
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
109
109
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, preferring
    # ctx1.  The subpaths from ctx2 matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subpaths present only in ctx2 get special treatment below.
    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            missing.add(subpath)
            del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2, so
    # that 'sub.{status|diff}(rev2)' produces an accurate result instead of
    # comparing the ctx2 subrepo against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
134
134
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Collect the excluded nodes that are secret and not extinct; they are
    # reported separately so the user can tell why nothing was exchanged.
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        msg = _("no changes found (ignored %d secret changesets)\n")
        ui.status(msg % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
151
151
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print a traceback (when --traceback/ui.traceback is set)
            # before re-raising into the typed handlers below.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode (py2) / str; normalize to bytes before
        # deciding how to print it.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the only handler that returns 1 instead of
        # falling through to the generic -1 below.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # The last word of the message is the module that failed to import.
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # "code" is present on HTTP errors; "reason" on URL/SSL errors.
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped into `head`) is not an error
            # worth reporting.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # Unrecognized IOError shape: let it propagate.
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
268
268
def checknewlabel(repo, lbl, kind):
    """Abort if lbl cannot be used as the name of a new label.

    Do not use the "kind" parameter in ui output; it makes strings
    difficult to translate.
    """
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # Purely numeric names would be ambiguous with revision numbers.
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285
285
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Reject '\r' and '\n' anywhere in the name.
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
290
290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302
302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    value = ui.config('ui', 'portablefilenames')
    lowered = value.lower()
    asbool = stringutil.parsebool(value)
    # On Windows we always abort, regardless of the configured value.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore'
    if asbool is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % value)
    return abort, warn
315
315
class casecollisionauditor(object):
    """Warn or abort when a filename case-folds onto an existing one.

    A lowered copy of every dirstate filename is kept in a set so that each
    __call__ is a cheap membership test.
    """
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Join all names with NUL and lower them in one encoding.lower()
        # call, then split back into a set of lowered names.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # _newfiles remembers names already checked so calling this object
        # twice with the same filename does not report a collision.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        collides = (lowered in self._loweredfiles
                    and f not in self._dirstate)
        if collides:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
339
339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # Only revisions at or below maxrev participate in the key.
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
363
363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the top-level path are fatal; deeper listing
        # failures are silently skipped by os.walk.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat if it is new; return True when it was
            # not seen before.
            st = os.stat(dirname)
            seen = any(samestat(st, prior) for prior in dirlst)
            if not seen:
                dirlst.append(st)
            return not seen
    else:
        # Without samestat we cannot detect symlink cycles, so do not
        # follow symlinks at all.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            # An MQ patch queue may itself be a repository.
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            kept = []
            for d in dirs:
                fullpath = os.path.join(root, d)
                if not adddir(seen_dirs, fullpath):
                    continue # already visited through another path
                if os.path.islink(fullpath):
                    for hgname in walkrepos(fullpath, True, seen_dirs):
                        yield hgname
                else:
                    kept.append(d)
            dirs[:] = kept
407
407
def binnode(ctx):
    """Return binary node id for a given basectx.

    The working directory (ctx.node() is None) is mapped to wdirid.
    """
    node = ctx.node()
    return wdirid if node is None else node
414
414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory (ctx.rev() is None) is mapped to wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
422
422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    # intrev()/binnode() map the working directory to its sentinel values.
    ui = ctx.repo().ui
    rev = intrev(ctx)
    node = binnode(ctx)
    return formatrevnode(ui, rev, node)
428
428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Debug output uses the full hex() form, normal output short().
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436
436
def resolvehexnodeidprefix(repo, prefix):
    """Resolve a hex nodeid prefix to a full binary node.

    Returns None when no node matches the prefix.
    """
    # Uses unfiltered repo because it's faster when prefix is ambiguous.
    # This matches the "shortest" template function.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return None
    # Looking the rev up on the filtered changelog makes sure the node
    # isn't filtered out of the current repoview.
    repo.changelog.rev(node)
    return node
445
445
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.LookupError if the symbol is an
    ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    else:
        return True
457
457
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        # Fast path for the common special names.
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # Try the symbol as a revision number.
        try:
            r = int(symbol)
            # Reject forms that int() accepts but that aren't canonical
            # decimal (e.g. '012', ' 1'), so they fall through to the
            # nodeid-prefix/name lookups below.
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                # Negative revs count back from the end.
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # A filtered revision must be reported as such, not retried as
            # some other symbol type.
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # 40 hex digits: possibly a full binary nodeid.
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Last resort: treat the symbol as an abbreviated hex nodeid.
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # The symbol resolved to the working directory pseudo-revision.
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        # Re-raise with a message explaining why the revision is hidden.
        raise _filterederror(repo, symbol)
518
518
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        # Filtered by something other than visibility: report which
        # repoview subset the revision is missing from.
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden.  Resolve it in the unfiltered repo and, if
    # it is obsolete, enrich the message with the reason that made this
    # changeset not visible.
    ctx = revsymbol(repo.unfiltered(), changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid
    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
543
543
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve ``revspec`` to a single changectx.

    An empty spec falls back to ``default``; note that the integer 0 is a
    valid revision number and is deliberately not treated as empty.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    # Multiple results are allowed; the last one wins.
    return repo[matched.last()]
552
552
def _pairspec(revspec):
    """Report whether ``revspec`` is a top-level range expression.

    Such a spec intrinsically denotes a pair of revisions."""
    parsed = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return parsed and parsed[0] in rangeops
556
556
def revpairnodes(repo, revs):
    # Deprecated shim: forwards to revpair() and converts the resulting
    # contexts to their nodes.
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    first, second = revpair(repo, revs)
    return first.node(), second.node()
561
561
def revpair(repo, revs):
    """Resolve ``revs`` to a pair of changectxs.

    With no specs, returns (working directory parent, working directory).
    When the whole set collapses to one revision and the spec was not an
    explicit range expression, the second context is the working directory.
    """
    if not revs:
        return repo['.'], repo[None]

    resolved = revrange(repo, revs)

    # Pick the endpoints, using the smartset's known ordering when
    # available instead of forcing full iteration.
    if not resolved:
        start = end = None
    elif resolved.isascending():
        start, end = resolved.min(), resolved.max()
    elif resolved.isdescending():
        start, end = resolved.max(), resolved.min()
    else:
        start, end = resolved.first(), resolved.last()

    if start is None:
        raise error.Abort(_('empty revision range'))
    if (start == end and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if start == end and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[start], repo[None]

    return repo[start], repo[end]
591
591
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases.

    The revsets in ``specs`` are combined with a chained ``OR``; an empty
    ``specs`` yields an empty result.  Integer entries are treated as
    revision numbers.

    The revsets are assumed to be pre-formatted; use
    ``revsetlang.formatspec()`` to expand arguments before passing them in.
    A single revset may be given.

    Returns a ``revset.abstractsmartset``, a list-like interface over
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
619
619
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        # A real merge: both parents matter.
        return ps
    if repo.ui.debugflag:
        # Debug output always shows the (null) second parent as well.
        return [ps[0], repo['null']]
    if ps[0].rev() >= intrev(ctx) - 1:
        # Linear history: the parent is implied by the previous revision.
        return []
    return ps
635
635
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicitly-kinded patterns are passed through untouched.
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # No filesystem match: keep the pattern as given.
            expanded.append(kindpat)
    return expanded
654
654
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # An empty command line shows up as a single empty pattern; treat it
    # as "no patterns".
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # Expand bare globs (no-op on posix, see expandpats()).
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: 'm' is assigned below; this closure relies on Python's
        # late binding, so it must only be called after ctx.match() runs.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # The matcher matches everything, so report no explicit patterns.
        pats = []
    return m, pats
679
679
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # Same as matchandpats(), discarding the expanded pattern list.
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
684
684
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # Delegates to the always-true matcher anchored at the repo root.
    return matchmod.always(repo.root, repo.getcwd())
688
688
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # Exact matcher: no patterns, no globbing, just the given file list.
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
692
692
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # Plain path: normalize it relative to the repository root.
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # Kinded pattern: it must select exactly one file in revision ``rev``.
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
706
706
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a plain file or symlink sitting where a parent directory of the
        # backup needs to go).
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # A directory (but not a symlink) at the backup path itself also
    # conflicts and must be removed.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
742
742
743 class _containsnode(object):
743 class _containsnode(object):
744 """proxy __contains__(node) to container.__contains__ which accepts revs"""
744 """proxy __contains__(node) to container.__contains__ which accepts revs"""
745
745
746 def __init__(self, repo, revcontainer):
746 def __init__(self, repo, revcontainer):
747 self._torev = repo.changelog.rev
747 self._torev = repo.changelog.rev
748 self._revcontains = revcontainer.__contains__
748 self._revcontains = revcontainer.__contains__
749
749
750 def __contains__(self, node):
750 def __contains__(self, node):
751 return self._revcontains(self._torev(node))
751 return self._revcontains(self._torev(node))
752
752
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms (a bare iterable means "replaced by
    # nothing")
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # caller-supplied destination takes precedence
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # No obsolescence support: physically strip the replaced nodes.
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
846
846
def addremove(repo, matcher, prefix, opts=None):
    '''Add unknown files matching ``matcher`` and forget missing ones,
    recursing into subrepos when requested; record likely renames.

    ``prefix`` is prepended to paths in user-facing subrepo messages.
    Recognized ``opts``: 'dry_run', 'similarity' (0-100), 'subrepos'.
    Returns 1 if any file was rejected or a subrepo reported failure,
    otherwise 0.  Raises error.Abort on an invalid similarity value.
    '''
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # _findrenames() expects a 0.0-1.0 ratio.
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # Recurse into subrepos first.
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # Collect files the matcher reports as bad so we can fail at the end.
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    # Report what will be added (unknown/forgotten) and removed (deleted).
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # An explicitly-named file that was rejected is a hard failure.
    for f in rejected:
        if f in m.files():
            return 1
    return ret
906
906
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: the lambda captures 'rejected' by late binding; it is defined on
    # the next line, before the matcher can invoke the callback.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # Report what will be added (unknown/forgotten) and removed
        # (deleted); only shown in verbose mode since files were explicit.
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # Any explicitly-named file the matcher rejected is a failure.
    for f in rejected:
        if f in m.files():
            return 1
    return 0
935
935
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) path lists.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for path, st in walkresults.iteritems():
        state = dirstate[path]
        # Classify by dirstate state char combined with the stat result
        # (a falsy st means the file is absent from the working directory).
        if state == '?' and audit.check(path):
            unknown.append(path)
        elif state != 'r' and not st:
            deleted.append(path)
        elif state == 'r' and st:
            forgotten.append(path)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed, forgotten
964
964
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping for pairs at least ``similarity`` alike.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
979
979
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # Take the wlock once and apply all dirstate changes together.
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
989
989
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # If src is itself a copy, follow it back to the original source.
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # Source was only added, never committed: there is no revision
            # to record copy data against, so just add dst.
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1008
1008
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings. Raises RequirementError if the
    file looks corrupt or lists features this Mercurial does not support.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            # An empty entry, or one that does not start with an
            # alphanumeric character, means the file itself is damaged
            # rather than merely listing an unknown feature.
            if not r or not r[0:1].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1027
1027
def writerequires(opener, requirements):
    '''Write the given requirement strings, one per line, via *opener*.

    Entries are emitted in sorted order so the resulting file content is
    deterministic regardless of the input iteration order.'''
    lines = ["%s\n" % requirement for requirement in sorted(requirements)]
    with opener('requires', 'w') as fp:
        fp.write(''.join(lines))
1032
1032
class filecachesubentry(object):
    '''Tracks the stat state of a single file path so callers can detect
    whether it changed on disk since the last refresh.'''

    def __init__(self, path, stat):
        # path: the file being watched; cachestat: last recorded stat data
        # (None until recorded); _cacheable: tri-state — True/False once
        # known, None while undetermined.
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # Re-record the on-disk state, but only if stat data is usable for
        # caching on this filesystem.
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            # Record the new state so a repeated call reports "unchanged".
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # Returns util.cachestat data, or None (implicitly) when the file
        # does not exist; any other OS error propagates.
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1087
1087
class filecacheentry(object):
    '''Aggregates change tracking over a group of file paths.

    Wraps one filecachesubentry per path; the group counts as changed as
    soon as any member does.'''

    def __init__(self, paths, stat=True):
        # 'stat' controls whether each sub-entry records the current
        # on-disk state immediately on construction.
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(subentry.changed() for subentry in self._entries)

    def refresh(self):
        for subentry in self._entries:
            subentry.refresh()
1104
1104
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # Relative paths of the files backing the cached value; resolved to
        # real locations per-instance via join().
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # Decorator entry point: remember the wrapped function and the
        # attribute name it will be cached under.
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # A value in obj.__dict__ shadows this (non-data) descriptor path,
        # so its presence means "use the cached value as-is".
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # Constructed with stat=False: no stat data is recorded for an
            # explicitly assigned value.
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # Drop only the materialized value; the _filecache entry is kept so
        # the next __get__ re-checks the files and recreates the object.
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1183
1183
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # Each record is "<revspec>[ <value>]"; a missing value maps
            # the revision to the empty string.
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # Always reap the subprocess and close the stream, even when the
        # parse loop above raised.
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1238
1238
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run 'cmd' via ui.system() with 'lock' made inheritable by the child.

    The lock's inheritance token is exported to the subprocess through the
    'envvar' environment variable for the duration of the call. Returns
    whatever ui.system() returns. Raises LockInheritanceContractViolation
    when called without the lock actually being held."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1248
1248
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1257
1257
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta

    Either of the two format knobs enables generaldelta at repo creation."""
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1264
1264
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1270
1270
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # NOTE(review): the 'keys' argument is accepted but never used in
        # this class; confirm whether any caller relies on it.
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # raised by dict() when a line lacks the '=' separator
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        # Validate every pair before opening the file, so an invalid dict
        # never produces a partially written file.
        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1339
1339
# Transaction-name prefixes for which registersummarycallback() reports
# newly obsoleted changesets.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Transaction-name prefixes for which registersummarycallback() reports
# the range of new changesets.
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1360
1360
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # True when this transaction's name starts with any of the given
        # source prefixes.
        return any(txnname.startswith(source) for source in sources)

    # Category names registered so far; also used to generate ordered,
    # unique category names for addpostclose().
    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # Zero-padded index keeps post-close callbacks in registration order.
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # Summarize how many changesets this transaction obsoleted.
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (display name, revset name) for each instability kind we report.
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # Count visible (unfiltered-minus-filtered) revisions per
            # instability kind.
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # Snapshot taken before the transaction so the callback can report
        # only the delta introduced by it.
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1444
1444
def nodesummaries(repo, nodes, maxnumnodes=4):
    '''Render a list of nodes as space-separated short hashes.

    Lists longer than maxnumnodes are truncated to the first maxnumnodes
    entries followed by "and N others", unless the ui is verbose.'''
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1450
1450
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads

    Aborts the transaction with a summary of the offending heads when a
    violation is found."""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
1465
1465
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # Identity by default; extensions monkeypatch this function to
    # decorate the sink.
    wrapped = sink
    return wrapped
1471
1471
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access to hidden changesets is an opt-in experimental feature
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    # only act on the standard filters; anything else is left untouched
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    hashlike = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        hashlike.update(revsetlang.gethashlikesymbols(parsed))

    if not hashlike:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlike)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
1514
1514
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols

    Each symbol is interpreted first as a revision number (only honored when
    ``experimental.directaccess.revnums`` is enabled), then as a hex nodeid
    prefix resolved against the unfiltered changelog. Only revisions absent
    from the filtered changelog (i.e. currently hidden) are returned.
    """
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # Valid revision numbers are 0..tiprev-1. The previous check used
            # "n <= tiprev", which wrongly treated len(unficl) itself as a
            # revnum candidate: it is never in cl, so an out-of-range revision
            # could be pinned instead of falling through to prefix matching.
            if n < tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # not a (permitted) revision number: try it as a hex nodeid prefix
        try:
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now