##// END OF EJS Templates
scmutil: handle full hex nodeids in revsymbol()...
Martin von Zweigbergk -
r37546:d2b484ee default
parent child Browse files
Show More
@@ -1,1517 +1,1530 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 hex,
22 hex,
22 nullid,
23 nullid,
23 short,
24 short,
24 wdirid,
25 wdirid,
25 wdirrev,
26 wdirrev,
26 )
27 )
27
28
28 from . import (
29 from . import (
29 encoding,
30 encoding,
30 error,
31 error,
31 match as matchmod,
32 match as matchmod,
32 obsolete,
33 obsolete,
33 obsutil,
34 obsutil,
34 pathutil,
35 pathutil,
35 phases,
36 phases,
36 pycompat,
37 pycompat,
37 revsetlang,
38 revsetlang,
38 similar,
39 similar,
39 url,
40 url,
40 util,
41 util,
41 vfs,
42 vfs,
42 )
43 )
43
44
44 from .utils import (
45 from .utils import (
45 procutil,
46 procutil,
46 stringutil,
47 stringutil,
47 )
48 )
48
49
49 if pycompat.iswindows:
50 if pycompat.iswindows:
50 from . import scmwindows as scmplatform
51 from . import scmwindows as scmplatform
51 else:
52 else:
52 from . import scmposix as scmplatform
53 from . import scmposix as scmplatform
53
54
54 termsize = scmplatform.termsize
55 termsize = scmplatform.termsize
55
56
56 class status(tuple):
57 class status(tuple):
57 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
58 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
58 and 'ignored' properties are only relevant to the working copy.
59 and 'ignored' properties are only relevant to the working copy.
59 '''
60 '''
60
61
61 __slots__ = ()
62 __slots__ = ()
62
63
63 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
64 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
64 clean):
65 clean):
65 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
66 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
66 ignored, clean))
67 ignored, clean))
67
68
68 @property
69 @property
69 def modified(self):
70 def modified(self):
70 '''files that have been modified'''
71 '''files that have been modified'''
71 return self[0]
72 return self[0]
72
73
73 @property
74 @property
74 def added(self):
75 def added(self):
75 '''files that have been added'''
76 '''files that have been added'''
76 return self[1]
77 return self[1]
77
78
78 @property
79 @property
79 def removed(self):
80 def removed(self):
80 '''files that have been removed'''
81 '''files that have been removed'''
81 return self[2]
82 return self[2]
82
83
83 @property
84 @property
84 def deleted(self):
85 def deleted(self):
85 '''files that are in the dirstate, but have been deleted from the
86 '''files that are in the dirstate, but have been deleted from the
86 working copy (aka "missing")
87 working copy (aka "missing")
87 '''
88 '''
88 return self[3]
89 return self[3]
89
90
90 @property
91 @property
91 def unknown(self):
92 def unknown(self):
92 '''files not in the dirstate that are not ignored'''
93 '''files not in the dirstate that are not ignored'''
93 return self[4]
94 return self[4]
94
95
95 @property
96 @property
96 def ignored(self):
97 def ignored(self):
97 '''files not in the dirstate that are ignored (by _dirignore())'''
98 '''files not in the dirstate that are ignored (by _dirignore())'''
98 return self[5]
99 return self[5]
99
100
100 @property
101 @property
101 def clean(self):
102 def clean(self):
102 '''files that have not been modified'''
103 '''files that have not been modified'''
103 return self[6]
104 return self[6]
104
105
105 def __repr__(self, *args, **kwargs):
106 def __repr__(self, *args, **kwargs):
106 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
107 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
107 'unknown=%r, ignored=%r, clean=%r>') % self)
108 'unknown=%r, ignored=%r, clean=%r>') % self)
108
109
109 def itersubrepos(ctx1, ctx2):
110 def itersubrepos(ctx1, ctx2):
110 """find subrepos in ctx1 or ctx2"""
111 """find subrepos in ctx1 or ctx2"""
111 # Create a (subpath, ctx) mapping where we prefer subpaths from
112 # Create a (subpath, ctx) mapping where we prefer subpaths from
112 # ctx1. The subpaths from ctx2 are important when the .hgsub file
113 # ctx1. The subpaths from ctx2 are important when the .hgsub file
113 # has been modified (in ctx2) but not yet committed (in ctx1).
114 # has been modified (in ctx2) but not yet committed (in ctx1).
114 subpaths = dict.fromkeys(ctx2.substate, ctx2)
115 subpaths = dict.fromkeys(ctx2.substate, ctx2)
115 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
116 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
116
117
117 missing = set()
118 missing = set()
118
119
119 for subpath in ctx2.substate:
120 for subpath in ctx2.substate:
120 if subpath not in ctx1.substate:
121 if subpath not in ctx1.substate:
121 del subpaths[subpath]
122 del subpaths[subpath]
122 missing.add(subpath)
123 missing.add(subpath)
123
124
124 for subpath, ctx in sorted(subpaths.iteritems()):
125 for subpath, ctx in sorted(subpaths.iteritems()):
125 yield subpath, ctx.sub(subpath)
126 yield subpath, ctx.sub(subpath)
126
127
127 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
128 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
128 # status and diff will have an accurate result when it does
129 # status and diff will have an accurate result when it does
129 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
130 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
130 # against itself.
131 # against itself.
131 for subpath in missing:
132 for subpath in missing:
132 yield subpath, ctx2.nullsub(subpath, ctx1)
133 yield subpath, ctx2.nullsub(subpath, ctx1)
133
134
134 def nochangesfound(ui, repo, excluded=None):
135 def nochangesfound(ui, repo, excluded=None):
135 '''Report no changes for push/pull, excluded is None or a list of
136 '''Report no changes for push/pull, excluded is None or a list of
136 nodes excluded from the push/pull.
137 nodes excluded from the push/pull.
137 '''
138 '''
138 secretlist = []
139 secretlist = []
139 if excluded:
140 if excluded:
140 for n in excluded:
141 for n in excluded:
141 ctx = repo[n]
142 ctx = repo[n]
142 if ctx.phase() >= phases.secret and not ctx.extinct():
143 if ctx.phase() >= phases.secret and not ctx.extinct():
143 secretlist.append(n)
144 secretlist.append(n)
144
145
145 if secretlist:
146 if secretlist:
146 ui.status(_("no changes found (ignored %d secret changesets)\n")
147 ui.status(_("no changes found (ignored %d secret changesets)\n")
147 % len(secretlist))
148 % len(secretlist))
148 else:
149 else:
149 ui.status(_("no changes found\n"))
150 ui.status(_("no changes found\n"))
150
151
151 def callcatch(ui, func):
152 def callcatch(ui, func):
152 """call func() with global exception handling
153 """call func() with global exception handling
153
154
154 return func() if no exception happens. otherwise do some error handling
155 return func() if no exception happens. otherwise do some error handling
155 and return an exit code accordingly. does not handle all exceptions.
156 and return an exit code accordingly. does not handle all exceptions.
156 """
157 """
157 try:
158 try:
158 try:
159 try:
159 return func()
160 return func()
160 except: # re-raises
161 except: # re-raises
161 ui.traceback()
162 ui.traceback()
162 raise
163 raise
163 # Global exception handling, alphabetically
164 # Global exception handling, alphabetically
164 # Mercurial-specific first, followed by built-in and library exceptions
165 # Mercurial-specific first, followed by built-in and library exceptions
165 except error.LockHeld as inst:
166 except error.LockHeld as inst:
166 if inst.errno == errno.ETIMEDOUT:
167 if inst.errno == errno.ETIMEDOUT:
167 reason = _('timed out waiting for lock held by %r') % inst.locker
168 reason = _('timed out waiting for lock held by %r') % inst.locker
168 else:
169 else:
169 reason = _('lock held by %r') % inst.locker
170 reason = _('lock held by %r') % inst.locker
170 ui.warn(_("abort: %s: %s\n")
171 ui.warn(_("abort: %s: %s\n")
171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
172 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
172 if not inst.locker:
173 if not inst.locker:
173 ui.warn(_("(lock might be very busy)\n"))
174 ui.warn(_("(lock might be very busy)\n"))
174 except error.LockUnavailable as inst:
175 except error.LockUnavailable as inst:
175 ui.warn(_("abort: could not lock %s: %s\n") %
176 ui.warn(_("abort: could not lock %s: %s\n") %
176 (inst.desc or stringutil.forcebytestr(inst.filename),
177 (inst.desc or stringutil.forcebytestr(inst.filename),
177 encoding.strtolocal(inst.strerror)))
178 encoding.strtolocal(inst.strerror)))
178 except error.OutOfBandError as inst:
179 except error.OutOfBandError as inst:
179 if inst.args:
180 if inst.args:
180 msg = _("abort: remote error:\n")
181 msg = _("abort: remote error:\n")
181 else:
182 else:
182 msg = _("abort: remote error\n")
183 msg = _("abort: remote error\n")
183 ui.warn(msg)
184 ui.warn(msg)
184 if inst.args:
185 if inst.args:
185 ui.warn(''.join(inst.args))
186 ui.warn(''.join(inst.args))
186 if inst.hint:
187 if inst.hint:
187 ui.warn('(%s)\n' % inst.hint)
188 ui.warn('(%s)\n' % inst.hint)
188 except error.RepoError as inst:
189 except error.RepoError as inst:
189 ui.warn(_("abort: %s!\n") % inst)
190 ui.warn(_("abort: %s!\n") % inst)
190 if inst.hint:
191 if inst.hint:
191 ui.warn(_("(%s)\n") % inst.hint)
192 ui.warn(_("(%s)\n") % inst.hint)
192 except error.ResponseError as inst:
193 except error.ResponseError as inst:
193 ui.warn(_("abort: %s") % inst.args[0])
194 ui.warn(_("abort: %s") % inst.args[0])
194 msg = inst.args[1]
195 msg = inst.args[1]
195 if isinstance(msg, type(u'')):
196 if isinstance(msg, type(u'')):
196 msg = pycompat.sysbytes(msg)
197 msg = pycompat.sysbytes(msg)
197 if not isinstance(msg, bytes):
198 if not isinstance(msg, bytes):
198 ui.warn(" %r\n" % (msg,))
199 ui.warn(" %r\n" % (msg,))
199 elif not msg:
200 elif not msg:
200 ui.warn(_(" empty string\n"))
201 ui.warn(_(" empty string\n"))
201 else:
202 else:
202 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
203 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
203 except error.CensoredNodeError as inst:
204 except error.CensoredNodeError as inst:
204 ui.warn(_("abort: file censored %s!\n") % inst)
205 ui.warn(_("abort: file censored %s!\n") % inst)
205 except error.RevlogError as inst:
206 except error.RevlogError as inst:
206 ui.warn(_("abort: %s!\n") % inst)
207 ui.warn(_("abort: %s!\n") % inst)
207 except error.InterventionRequired as inst:
208 except error.InterventionRequired as inst:
208 ui.warn("%s\n" % inst)
209 ui.warn("%s\n" % inst)
209 if inst.hint:
210 if inst.hint:
210 ui.warn(_("(%s)\n") % inst.hint)
211 ui.warn(_("(%s)\n") % inst.hint)
211 return 1
212 return 1
212 except error.WdirUnsupported:
213 except error.WdirUnsupported:
213 ui.warn(_("abort: working directory revision cannot be specified\n"))
214 ui.warn(_("abort: working directory revision cannot be specified\n"))
214 except error.Abort as inst:
215 except error.Abort as inst:
215 ui.warn(_("abort: %s\n") % inst)
216 ui.warn(_("abort: %s\n") % inst)
216 if inst.hint:
217 if inst.hint:
217 ui.warn(_("(%s)\n") % inst.hint)
218 ui.warn(_("(%s)\n") % inst.hint)
218 except ImportError as inst:
219 except ImportError as inst:
219 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
220 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
220 m = stringutil.forcebytestr(inst).split()[-1]
221 m = stringutil.forcebytestr(inst).split()[-1]
221 if m in "mpatch bdiff".split():
222 if m in "mpatch bdiff".split():
222 ui.warn(_("(did you forget to compile extensions?)\n"))
223 ui.warn(_("(did you forget to compile extensions?)\n"))
223 elif m in "zlib".split():
224 elif m in "zlib".split():
224 ui.warn(_("(is your Python install correct?)\n"))
225 ui.warn(_("(is your Python install correct?)\n"))
225 except IOError as inst:
226 except IOError as inst:
226 if util.safehasattr(inst, "code"):
227 if util.safehasattr(inst, "code"):
227 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
228 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
228 elif util.safehasattr(inst, "reason"):
229 elif util.safehasattr(inst, "reason"):
229 try: # usually it is in the form (errno, strerror)
230 try: # usually it is in the form (errno, strerror)
230 reason = inst.reason.args[1]
231 reason = inst.reason.args[1]
231 except (AttributeError, IndexError):
232 except (AttributeError, IndexError):
232 # it might be anything, for example a string
233 # it might be anything, for example a string
233 reason = inst.reason
234 reason = inst.reason
234 if isinstance(reason, unicode):
235 if isinstance(reason, unicode):
235 # SSLError of Python 2.7.9 contains a unicode
236 # SSLError of Python 2.7.9 contains a unicode
236 reason = encoding.unitolocal(reason)
237 reason = encoding.unitolocal(reason)
237 ui.warn(_("abort: error: %s\n") % reason)
238 ui.warn(_("abort: error: %s\n") % reason)
238 elif (util.safehasattr(inst, "args")
239 elif (util.safehasattr(inst, "args")
239 and inst.args and inst.args[0] == errno.EPIPE):
240 and inst.args and inst.args[0] == errno.EPIPE):
240 pass
241 pass
241 elif getattr(inst, "strerror", None):
242 elif getattr(inst, "strerror", None):
242 if getattr(inst, "filename", None):
243 if getattr(inst, "filename", None):
243 ui.warn(_("abort: %s: %s\n") % (
244 ui.warn(_("abort: %s: %s\n") % (
244 encoding.strtolocal(inst.strerror),
245 encoding.strtolocal(inst.strerror),
245 stringutil.forcebytestr(inst.filename)))
246 stringutil.forcebytestr(inst.filename)))
246 else:
247 else:
247 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
248 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
248 else:
249 else:
249 raise
250 raise
250 except OSError as inst:
251 except OSError as inst:
251 if getattr(inst, "filename", None) is not None:
252 if getattr(inst, "filename", None) is not None:
252 ui.warn(_("abort: %s: '%s'\n") % (
253 ui.warn(_("abort: %s: '%s'\n") % (
253 encoding.strtolocal(inst.strerror),
254 encoding.strtolocal(inst.strerror),
254 stringutil.forcebytestr(inst.filename)))
255 stringutil.forcebytestr(inst.filename)))
255 else:
256 else:
256 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
257 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
257 except MemoryError:
258 except MemoryError:
258 ui.warn(_("abort: out of memory\n"))
259 ui.warn(_("abort: out of memory\n"))
259 except SystemExit as inst:
260 except SystemExit as inst:
260 # Commands shouldn't sys.exit directly, but give a return code.
261 # Commands shouldn't sys.exit directly, but give a return code.
261 # Just in case catch this and and pass exit code to caller.
262 # Just in case catch this and and pass exit code to caller.
262 return inst.code
263 return inst.code
263 except socket.error as inst:
264 except socket.error as inst:
264 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
265 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
265
266
266 return -1
267 return -1
267
268
268 def checknewlabel(repo, lbl, kind):
269 def checknewlabel(repo, lbl, kind):
269 # Do not use the "kind" parameter in ui output.
270 # Do not use the "kind" parameter in ui output.
270 # It makes strings difficult to translate.
271 # It makes strings difficult to translate.
271 if lbl in ['tip', '.', 'null']:
272 if lbl in ['tip', '.', 'null']:
272 raise error.Abort(_("the name '%s' is reserved") % lbl)
273 raise error.Abort(_("the name '%s' is reserved") % lbl)
273 for c in (':', '\0', '\n', '\r'):
274 for c in (':', '\0', '\n', '\r'):
274 if c in lbl:
275 if c in lbl:
275 raise error.Abort(
276 raise error.Abort(
276 _("%r cannot be used in a name") % pycompat.bytestr(c))
277 _("%r cannot be used in a name") % pycompat.bytestr(c))
277 try:
278 try:
278 int(lbl)
279 int(lbl)
279 raise error.Abort(_("cannot use an integer as a name"))
280 raise error.Abort(_("cannot use an integer as a name"))
280 except ValueError:
281 except ValueError:
281 pass
282 pass
282 if lbl.strip() != lbl:
283 if lbl.strip() != lbl:
283 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
285
285 def checkfilename(f):
286 def checkfilename(f):
286 '''Check that the filename f is an acceptable filename for a tracked file'''
287 '''Check that the filename f is an acceptable filename for a tracked file'''
287 if '\r' in f or '\n' in f:
288 if '\r' in f or '\n' in f:
288 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289
290
290 def checkportable(ui, f):
291 def checkportable(ui, f):
291 '''Check if filename f is portable and warn or abort depending on config'''
292 '''Check if filename f is portable and warn or abort depending on config'''
292 checkfilename(f)
293 checkfilename(f)
293 abort, warn = checkportabilityalert(ui)
294 abort, warn = checkportabilityalert(ui)
294 if abort or warn:
295 if abort or warn:
295 msg = util.checkwinfilename(f)
296 msg = util.checkwinfilename(f)
296 if msg:
297 if msg:
297 msg = "%s: %s" % (msg, procutil.shellquote(f))
298 msg = "%s: %s" % (msg, procutil.shellquote(f))
298 if abort:
299 if abort:
299 raise error.Abort(msg)
300 raise error.Abort(msg)
300 ui.warn(_("warning: %s\n") % msg)
301 ui.warn(_("warning: %s\n") % msg)
301
302
302 def checkportabilityalert(ui):
303 def checkportabilityalert(ui):
303 '''check if the user's config requests nothing, a warning, or abort for
304 '''check if the user's config requests nothing, a warning, or abort for
304 non-portable filenames'''
305 non-portable filenames'''
305 val = ui.config('ui', 'portablefilenames')
306 val = ui.config('ui', 'portablefilenames')
306 lval = val.lower()
307 lval = val.lower()
307 bval = stringutil.parsebool(val)
308 bval = stringutil.parsebool(val)
308 abort = pycompat.iswindows or lval == 'abort'
309 abort = pycompat.iswindows or lval == 'abort'
309 warn = bval or lval == 'warn'
310 warn = bval or lval == 'warn'
310 if bval is None and not (warn or abort or lval == 'ignore'):
311 if bval is None and not (warn or abort or lval == 'ignore'):
311 raise error.ConfigError(
312 raise error.ConfigError(
312 _("ui.portablefilenames value is invalid ('%s')") % val)
313 _("ui.portablefilenames value is invalid ('%s')") % val)
313 return abort, warn
314 return abort, warn
314
315
315 class casecollisionauditor(object):
316 class casecollisionauditor(object):
316 def __init__(self, ui, abort, dirstate):
317 def __init__(self, ui, abort, dirstate):
317 self._ui = ui
318 self._ui = ui
318 self._abort = abort
319 self._abort = abort
319 allfiles = '\0'.join(dirstate._map)
320 allfiles = '\0'.join(dirstate._map)
320 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
321 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
321 self._dirstate = dirstate
322 self._dirstate = dirstate
322 # The purpose of _newfiles is so that we don't complain about
323 # The purpose of _newfiles is so that we don't complain about
323 # case collisions if someone were to call this object with the
324 # case collisions if someone were to call this object with the
324 # same filename twice.
325 # same filename twice.
325 self._newfiles = set()
326 self._newfiles = set()
326
327
327 def __call__(self, f):
328 def __call__(self, f):
328 if f in self._newfiles:
329 if f in self._newfiles:
329 return
330 return
330 fl = encoding.lower(f)
331 fl = encoding.lower(f)
331 if fl in self._loweredfiles and f not in self._dirstate:
332 if fl in self._loweredfiles and f not in self._dirstate:
332 msg = _('possible case-folding collision for %s') % f
333 msg = _('possible case-folding collision for %s') % f
333 if self._abort:
334 if self._abort:
334 raise error.Abort(msg)
335 raise error.Abort(msg)
335 self._ui.warn(_("warning: %s\n") % msg)
336 self._ui.warn(_("warning: %s\n") % msg)
336 self._loweredfiles.add(fl)
337 self._loweredfiles.add(fl)
337 self._newfiles.add(f)
338 self._newfiles.add(f)
338
339
339 def filteredhash(repo, maxrev):
340 def filteredhash(repo, maxrev):
340 """build hash of filtered revisions in the current repoview.
341 """build hash of filtered revisions in the current repoview.
341
342
342 Multiple caches perform up-to-date validation by checking that the
343 Multiple caches perform up-to-date validation by checking that the
343 tiprev and tipnode stored in the cache file match the current repository.
344 tiprev and tipnode stored in the cache file match the current repository.
344 However, this is not sufficient for validating repoviews because the set
345 However, this is not sufficient for validating repoviews because the set
345 of revisions in the view may change without the repository tiprev and
346 of revisions in the view may change without the repository tiprev and
346 tipnode changing.
347 tipnode changing.
347
348
348 This function hashes all the revs filtered from the view and returns
349 This function hashes all the revs filtered from the view and returns
349 that SHA-1 digest.
350 that SHA-1 digest.
350 """
351 """
351 cl = repo.changelog
352 cl = repo.changelog
352 if not cl.filteredrevs:
353 if not cl.filteredrevs:
353 return None
354 return None
354 key = None
355 key = None
355 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
356 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
356 if revs:
357 if revs:
357 s = hashlib.sha1()
358 s = hashlib.sha1()
358 for rev in revs:
359 for rev in revs:
359 s.update('%d;' % rev)
360 s.update('%d;' % rev)
360 key = s.digest()
361 key = s.digest()
361 return key
362 return key
362
363
363 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
364 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
364 '''yield every hg repository under path, always recursively.
365 '''yield every hg repository under path, always recursively.
365 The recurse flag will only control recursion into repo working dirs'''
366 The recurse flag will only control recursion into repo working dirs'''
366 def errhandler(err):
367 def errhandler(err):
367 if err.filename == path:
368 if err.filename == path:
368 raise err
369 raise err
369 samestat = getattr(os.path, 'samestat', None)
370 samestat = getattr(os.path, 'samestat', None)
370 if followsym and samestat is not None:
371 if followsym and samestat is not None:
371 def adddir(dirlst, dirname):
372 def adddir(dirlst, dirname):
372 dirstat = os.stat(dirname)
373 dirstat = os.stat(dirname)
373 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
374 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
374 if not match:
375 if not match:
375 dirlst.append(dirstat)
376 dirlst.append(dirstat)
376 return not match
377 return not match
377 else:
378 else:
378 followsym = False
379 followsym = False
379
380
380 if (seen_dirs is None) and followsym:
381 if (seen_dirs is None) and followsym:
381 seen_dirs = []
382 seen_dirs = []
382 adddir(seen_dirs, path)
383 adddir(seen_dirs, path)
383 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
384 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
384 dirs.sort()
385 dirs.sort()
385 if '.hg' in dirs:
386 if '.hg' in dirs:
386 yield root # found a repository
387 yield root # found a repository
387 qroot = os.path.join(root, '.hg', 'patches')
388 qroot = os.path.join(root, '.hg', 'patches')
388 if os.path.isdir(os.path.join(qroot, '.hg')):
389 if os.path.isdir(os.path.join(qroot, '.hg')):
389 yield qroot # we have a patch queue repo here
390 yield qroot # we have a patch queue repo here
390 if recurse:
391 if recurse:
391 # avoid recursing inside the .hg directory
392 # avoid recursing inside the .hg directory
392 dirs.remove('.hg')
393 dirs.remove('.hg')
393 else:
394 else:
394 dirs[:] = [] # don't descend further
395 dirs[:] = [] # don't descend further
395 elif followsym:
396 elif followsym:
396 newdirs = []
397 newdirs = []
397 for d in dirs:
398 for d in dirs:
398 fname = os.path.join(root, d)
399 fname = os.path.join(root, d)
399 if adddir(seen_dirs, fname):
400 if adddir(seen_dirs, fname):
400 if os.path.islink(fname):
401 if os.path.islink(fname):
401 for hgname in walkrepos(fname, True, seen_dirs):
402 for hgname in walkrepos(fname, True, seen_dirs):
402 yield hgname
403 yield hgname
403 else:
404 else:
404 newdirs.append(d)
405 newdirs.append(d)
405 dirs[:] = newdirs
406 dirs[:] = newdirs
406
407
407 def binnode(ctx):
408 def binnode(ctx):
408 """Return binary node id for a given basectx"""
409 """Return binary node id for a given basectx"""
409 node = ctx.node()
410 node = ctx.node()
410 if node is None:
411 if node is None:
411 return wdirid
412 return wdirid
412 return node
413 return node
413
414
414 def intrev(ctx):
415 def intrev(ctx):
415 """Return integer for a given basectx that can be used in comparison or
416 """Return integer for a given basectx that can be used in comparison or
416 arithmetic operation"""
417 arithmetic operation"""
417 rev = ctx.rev()
418 rev = ctx.rev()
418 if rev is None:
419 if rev is None:
419 return wdirrev
420 return wdirrev
420 return rev
421 return rev
421
422
422 def formatchangeid(ctx):
423 def formatchangeid(ctx):
423 """Format changectx as '{rev}:{node|formatnode}', which is the default
424 """Format changectx as '{rev}:{node|formatnode}', which is the default
424 template provided by logcmdutil.changesettemplater"""
425 template provided by logcmdutil.changesettemplater"""
425 repo = ctx.repo()
426 repo = ctx.repo()
426 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
427 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
427
428
428 def formatrevnode(ui, rev, node):
429 def formatrevnode(ui, rev, node):
429 """Format given revision and node depending on the current verbosity"""
430 """Format given revision and node depending on the current verbosity"""
430 if ui.debugflag:
431 if ui.debugflag:
431 hexfunc = hex
432 hexfunc = hex
432 else:
433 else:
433 hexfunc = short
434 hexfunc = short
434 return '%d:%s' % (rev, hexfunc(node))
435 return '%d:%s' % (rev, hexfunc(node))
435
436
436 def resolvepartialhexnodeid(repo, prefix):
437 def resolvepartialhexnodeid(repo, prefix):
437 # Uses unfiltered repo because it's faster when then prefix is ambiguous/
438 # Uses unfiltered repo because it's faster when then prefix is ambiguous/
438 # This matches the "shortest" template function.
439 # This matches the "shortest" template function.
439 node = repo.unfiltered().changelog._partialmatch(prefix)
440 node = repo.unfiltered().changelog._partialmatch(prefix)
440 if node is None:
441 if node is None:
441 return
442 return
442 repo.changelog.rev(node) # make sure node isn't filtered
443 repo.changelog.rev(node) # make sure node isn't filtered
443 return node
444 return node
444
445
445 def isrevsymbol(repo, symbol):
446 def isrevsymbol(repo, symbol):
446 try:
447 try:
447 revsymbol(repo, symbol)
448 revsymbol(repo, symbol)
448 return True
449 return True
449 except error.RepoLookupError:
450 except error.RepoLookupError:
450 return False
451 return False
451
452
452 def revsymbol(repo, symbol):
453 def revsymbol(repo, symbol):
453 """Returns a context given a single revision symbol (as string).
454 """Returns a context given a single revision symbol (as string).
454
455
455 This is similar to revsingle(), but accepts only a single revision symbol,
456 This is similar to revsingle(), but accepts only a single revision symbol,
456 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
457 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
457 not "max(public())".
458 not "max(public())".
458 """
459 """
459 if not isinstance(symbol, bytes):
460 if not isinstance(symbol, bytes):
460 msg = ("symbol (%s of type %s) was not a string, did you mean "
461 msg = ("symbol (%s of type %s) was not a string, did you mean "
461 "repo[symbol]?" % (symbol, type(symbol)))
462 "repo[symbol]?" % (symbol, type(symbol)))
462 raise error.ProgrammingError(msg)
463 raise error.ProgrammingError(msg)
463 try:
464 try:
464 if symbol in ('.', 'tip', 'null'):
465 if symbol in ('.', 'tip', 'null'):
465 return repo[symbol]
466 return repo[symbol]
466
467
467 try:
468 try:
468 r = int(symbol)
469 r = int(symbol)
469 if '%d' % r != symbol:
470 if '%d' % r != symbol:
470 raise ValueError
471 raise ValueError
471 l = len(repo.changelog)
472 l = len(repo.changelog)
472 if r < 0:
473 if r < 0:
473 r += l
474 r += l
474 if r < 0 or r >= l and r != wdirrev:
475 if r < 0 or r >= l and r != wdirrev:
475 raise ValueError
476 raise ValueError
476 return repo[r]
477 return repo[r]
477 except error.FilteredIndexError:
478 except error.FilteredIndexError:
478 raise
479 raise
479 except (ValueError, OverflowError, IndexError):
480 except (ValueError, OverflowError, IndexError):
480 pass
481 pass
481
482
483 if len(symbol) == 40:
484 try:
485 node = bin(symbol)
486 rev = repo.changelog.rev(node)
487 return repo[rev]
488 except error.FilteredLookupError:
489 raise
490 except (TypeError, LookupError):
491 pass
492
482 return repo[symbol]
493 return repo[symbol]
483
494
495 except error.WdirUnsupported:
496 return repo[None]
484 except (error.FilteredIndexError, error.FilteredLookupError,
497 except (error.FilteredIndexError, error.FilteredLookupError,
485 error.FilteredRepoLookupError):
498 error.FilteredRepoLookupError):
486 raise _filterederror(repo, symbol)
499 raise _filterederror(repo, symbol)
487
500
488 def _filterederror(repo, changeid):
501 def _filterederror(repo, changeid):
489 """build an exception to be raised about a filtered changeid
502 """build an exception to be raised about a filtered changeid
490
503
491 This is extracted in a function to help extensions (eg: evolve) to
504 This is extracted in a function to help extensions (eg: evolve) to
492 experiment with various message variants."""
505 experiment with various message variants."""
493 if repo.filtername.startswith('visible'):
506 if repo.filtername.startswith('visible'):
494
507
495 # Check if the changeset is obsolete
508 # Check if the changeset is obsolete
496 unfilteredrepo = repo.unfiltered()
509 unfilteredrepo = repo.unfiltered()
497 ctx = revsymbol(unfilteredrepo, changeid)
510 ctx = revsymbol(unfilteredrepo, changeid)
498
511
499 # If the changeset is obsolete, enrich the message with the reason
512 # If the changeset is obsolete, enrich the message with the reason
500 # that made this changeset not visible
513 # that made this changeset not visible
501 if ctx.obsolete():
514 if ctx.obsolete():
502 msg = obsutil._getfilteredreason(repo, changeid, ctx)
515 msg = obsutil._getfilteredreason(repo, changeid, ctx)
503 else:
516 else:
504 msg = _("hidden revision '%s'") % changeid
517 msg = _("hidden revision '%s'") % changeid
505
518
506 hint = _('use --hidden to access hidden revisions')
519 hint = _('use --hidden to access hidden revisions')
507
520
508 return error.FilteredRepoLookupError(msg, hint=hint)
521 return error.FilteredRepoLookupError(msg, hint=hint)
509 msg = _("filtered revision '%s' (not in '%s' subset)")
522 msg = _("filtered revision '%s' (not in '%s' subset)")
510 msg %= (changeid, repo.filtername)
523 msg %= (changeid, repo.filtername)
511 return error.FilteredRepoLookupError(msg)
524 return error.FilteredRepoLookupError(msg)
512
525
513 def revsingle(repo, revspec, default='.', localalias=None):
526 def revsingle(repo, revspec, default='.', localalias=None):
514 if not revspec and revspec != 0:
527 if not revspec and revspec != 0:
515 return repo[default]
528 return repo[default]
516
529
517 l = revrange(repo, [revspec], localalias=localalias)
530 l = revrange(repo, [revspec], localalias=localalias)
518 if not l:
531 if not l:
519 raise error.Abort(_('empty revision set'))
532 raise error.Abort(_('empty revision set'))
520 return repo[l.last()]
533 return repo[l.last()]
521
534
522 def _pairspec(revspec):
535 def _pairspec(revspec):
523 tree = revsetlang.parse(revspec)
536 tree = revsetlang.parse(revspec)
524 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
537 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
525
538
526 def revpairnodes(repo, revs):
539 def revpairnodes(repo, revs):
527 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
540 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
528 ctx1, ctx2 = revpair(repo, revs)
541 ctx1, ctx2 = revpair(repo, revs)
529 return ctx1.node(), ctx2.node()
542 return ctx1.node(), ctx2.node()
530
543
531 def revpair(repo, revs):
544 def revpair(repo, revs):
532 if not revs:
545 if not revs:
533 return repo['.'], repo[None]
546 return repo['.'], repo[None]
534
547
535 l = revrange(repo, revs)
548 l = revrange(repo, revs)
536
549
537 if not l:
550 if not l:
538 first = second = None
551 first = second = None
539 elif l.isascending():
552 elif l.isascending():
540 first = l.min()
553 first = l.min()
541 second = l.max()
554 second = l.max()
542 elif l.isdescending():
555 elif l.isdescending():
543 first = l.max()
556 first = l.max()
544 second = l.min()
557 second = l.min()
545 else:
558 else:
546 first = l.first()
559 first = l.first()
547 second = l.last()
560 second = l.last()
548
561
549 if first is None:
562 if first is None:
550 raise error.Abort(_('empty revision range'))
563 raise error.Abort(_('empty revision range'))
551 if (first == second and len(revs) >= 2
564 if (first == second and len(revs) >= 2
552 and not all(revrange(repo, [r]) for r in revs)):
565 and not all(revrange(repo, [r]) for r in revs)):
553 raise error.Abort(_('empty revision on one side of range'))
566 raise error.Abort(_('empty revision on one side of range'))
554
567
555 # if top-level is range expression, the result must always be a pair
568 # if top-level is range expression, the result must always be a pair
556 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
569 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
557 return repo[first], repo[None]
570 return repo[first], repo[None]
558
571
559 return repo[first], repo[second]
572 return repo[first], repo[second]
560
573
561 def revrange(repo, specs, localalias=None):
574 def revrange(repo, specs, localalias=None):
562 """Execute 1 to many revsets and return the union.
575 """Execute 1 to many revsets and return the union.
563
576
564 This is the preferred mechanism for executing revsets using user-specified
577 This is the preferred mechanism for executing revsets using user-specified
565 config options, such as revset aliases.
578 config options, such as revset aliases.
566
579
567 The revsets specified by ``specs`` will be executed via a chained ``OR``
580 The revsets specified by ``specs`` will be executed via a chained ``OR``
568 expression. If ``specs`` is empty, an empty result is returned.
581 expression. If ``specs`` is empty, an empty result is returned.
569
582
570 ``specs`` can contain integers, in which case they are assumed to be
583 ``specs`` can contain integers, in which case they are assumed to be
571 revision numbers.
584 revision numbers.
572
585
573 It is assumed the revsets are already formatted. If you have arguments
586 It is assumed the revsets are already formatted. If you have arguments
574 that need to be expanded in the revset, call ``revsetlang.formatspec()``
587 that need to be expanded in the revset, call ``revsetlang.formatspec()``
575 and pass the result as an element of ``specs``.
588 and pass the result as an element of ``specs``.
576
589
577 Specifying a single revset is allowed.
590 Specifying a single revset is allowed.
578
591
579 Returns a ``revset.abstractsmartset`` which is a list-like interface over
592 Returns a ``revset.abstractsmartset`` which is a list-like interface over
580 integer revisions.
593 integer revisions.
581 """
594 """
582 allspecs = []
595 allspecs = []
583 for spec in specs:
596 for spec in specs:
584 if isinstance(spec, int):
597 if isinstance(spec, int):
585 spec = revsetlang.formatspec('rev(%d)', spec)
598 spec = revsetlang.formatspec('rev(%d)', spec)
586 allspecs.append(spec)
599 allspecs.append(spec)
587 return repo.anyrevs(allspecs, user=True, localalias=localalias)
600 return repo.anyrevs(allspecs, user=True, localalias=localalias)
588
601
589 def meaningfulparents(repo, ctx):
602 def meaningfulparents(repo, ctx):
590 """Return list of meaningful (or all if debug) parentrevs for rev.
603 """Return list of meaningful (or all if debug) parentrevs for rev.
591
604
592 For merges (two non-nullrev revisions) both parents are meaningful.
605 For merges (two non-nullrev revisions) both parents are meaningful.
593 Otherwise the first parent revision is considered meaningful if it
606 Otherwise the first parent revision is considered meaningful if it
594 is not the preceding revision.
607 is not the preceding revision.
595 """
608 """
596 parents = ctx.parents()
609 parents = ctx.parents()
597 if len(parents) > 1:
610 if len(parents) > 1:
598 return parents
611 return parents
599 if repo.ui.debugflag:
612 if repo.ui.debugflag:
600 return [parents[0], repo['null']]
613 return [parents[0], repo['null']]
601 if parents[0].rev() >= intrev(ctx) - 1:
614 if parents[0].rev() >= intrev(ctx) - 1:
602 return []
615 return []
603 return parents
616 return parents
604
617
605 def expandpats(pats):
618 def expandpats(pats):
606 '''Expand bare globs when running on windows.
619 '''Expand bare globs when running on windows.
607 On posix we assume it already has already been done by sh.'''
620 On posix we assume it already has already been done by sh.'''
608 if not util.expandglobs:
621 if not util.expandglobs:
609 return list(pats)
622 return list(pats)
610 ret = []
623 ret = []
611 for kindpat in pats:
624 for kindpat in pats:
612 kind, pat = matchmod._patsplit(kindpat, None)
625 kind, pat = matchmod._patsplit(kindpat, None)
613 if kind is None:
626 if kind is None:
614 try:
627 try:
615 globbed = glob.glob(pat)
628 globbed = glob.glob(pat)
616 except re.error:
629 except re.error:
617 globbed = [pat]
630 globbed = [pat]
618 if globbed:
631 if globbed:
619 ret.extend(globbed)
632 ret.extend(globbed)
620 continue
633 continue
621 ret.append(kindpat)
634 ret.append(kindpat)
622 return ret
635 return ret
623
636
624 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
637 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
625 badfn=None):
638 badfn=None):
626 '''Return a matcher and the patterns that were used.
639 '''Return a matcher and the patterns that were used.
627 The matcher will warn about bad matches, unless an alternate badfn callback
640 The matcher will warn about bad matches, unless an alternate badfn callback
628 is provided.'''
641 is provided.'''
629 if pats == ("",):
642 if pats == ("",):
630 pats = []
643 pats = []
631 if opts is None:
644 if opts is None:
632 opts = {}
645 opts = {}
633 if not globbed and default == 'relpath':
646 if not globbed and default == 'relpath':
634 pats = expandpats(pats or [])
647 pats = expandpats(pats or [])
635
648
636 def bad(f, msg):
649 def bad(f, msg):
637 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
650 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
638
651
639 if badfn is None:
652 if badfn is None:
640 badfn = bad
653 badfn = bad
641
654
642 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
655 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
643 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
656 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
644
657
645 if m.always():
658 if m.always():
646 pats = []
659 pats = []
647 return m, pats
660 return m, pats
648
661
649 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
662 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
650 badfn=None):
663 badfn=None):
651 '''Return a matcher that will warn about bad matches.'''
664 '''Return a matcher that will warn about bad matches.'''
652 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
665 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
653
666
654 def matchall(repo):
667 def matchall(repo):
655 '''Return a matcher that will efficiently match everything.'''
668 '''Return a matcher that will efficiently match everything.'''
656 return matchmod.always(repo.root, repo.getcwd())
669 return matchmod.always(repo.root, repo.getcwd())
657
670
658 def matchfiles(repo, files, badfn=None):
671 def matchfiles(repo, files, badfn=None):
659 '''Return a matcher that will efficiently match exactly these files.'''
672 '''Return a matcher that will efficiently match exactly these files.'''
660 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
673 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
661
674
662 def parsefollowlinespattern(repo, rev, pat, msg):
675 def parsefollowlinespattern(repo, rev, pat, msg):
663 """Return a file name from `pat` pattern suitable for usage in followlines
676 """Return a file name from `pat` pattern suitable for usage in followlines
664 logic.
677 logic.
665 """
678 """
666 if not matchmod.patkind(pat):
679 if not matchmod.patkind(pat):
667 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
680 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
668 else:
681 else:
669 ctx = repo[rev]
682 ctx = repo[rev]
670 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
683 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
671 files = [f for f in ctx if m(f)]
684 files = [f for f in ctx if m(f)]
672 if len(files) != 1:
685 if len(files) != 1:
673 raise error.ParseError(msg)
686 raise error.ParseError(msg)
674 return files[0]
687 return files[0]
675
688
676 def origpath(ui, repo, filepath):
689 def origpath(ui, repo, filepath):
677 '''customize where .orig files are created
690 '''customize where .orig files are created
678
691
679 Fetch user defined path from config file: [ui] origbackuppath = <path>
692 Fetch user defined path from config file: [ui] origbackuppath = <path>
680 Fall back to default (filepath with .orig suffix) if not specified
693 Fall back to default (filepath with .orig suffix) if not specified
681 '''
694 '''
682 origbackuppath = ui.config('ui', 'origbackuppath')
695 origbackuppath = ui.config('ui', 'origbackuppath')
683 if not origbackuppath:
696 if not origbackuppath:
684 return filepath + ".orig"
697 return filepath + ".orig"
685
698
686 # Convert filepath from an absolute path into a path inside the repo.
699 # Convert filepath from an absolute path into a path inside the repo.
687 filepathfromroot = util.normpath(os.path.relpath(filepath,
700 filepathfromroot = util.normpath(os.path.relpath(filepath,
688 start=repo.root))
701 start=repo.root))
689
702
690 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
703 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
691 origbackupdir = origvfs.dirname(filepathfromroot)
704 origbackupdir = origvfs.dirname(filepathfromroot)
692 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
705 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
693 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
706 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
694
707
695 # Remove any files that conflict with the backup file's path
708 # Remove any files that conflict with the backup file's path
696 for f in reversed(list(util.finddirs(filepathfromroot))):
709 for f in reversed(list(util.finddirs(filepathfromroot))):
697 if origvfs.isfileorlink(f):
710 if origvfs.isfileorlink(f):
698 ui.note(_('removing conflicting file: %s\n')
711 ui.note(_('removing conflicting file: %s\n')
699 % origvfs.join(f))
712 % origvfs.join(f))
700 origvfs.unlink(f)
713 origvfs.unlink(f)
701 break
714 break
702
715
703 origvfs.makedirs(origbackupdir)
716 origvfs.makedirs(origbackupdir)
704
717
705 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
718 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
706 ui.note(_('removing conflicting directory: %s\n')
719 ui.note(_('removing conflicting directory: %s\n')
707 % origvfs.join(filepathfromroot))
720 % origvfs.join(filepathfromroot))
708 origvfs.rmtree(filepathfromroot, forcibly=True)
721 origvfs.rmtree(filepathfromroot, forcibly=True)
709
722
710 return origvfs.join(filepathfromroot)
723 return origvfs.join(filepathfromroot)
711
724
712 class _containsnode(object):
725 class _containsnode(object):
713 """proxy __contains__(node) to container.__contains__ which accepts revs"""
726 """proxy __contains__(node) to container.__contains__ which accepts revs"""
714
727
715 def __init__(self, repo, revcontainer):
728 def __init__(self, repo, revcontainer):
716 self._torev = repo.changelog.rev
729 self._torev = repo.changelog.rev
717 self._revcontains = revcontainer.__contains__
730 self._revcontains = revcontainer.__contains__
718
731
719 def __contains__(self, node):
732 def __contains__(self, node):
720 return self._revcontains(self._torev(node))
733 return self._revcontains(self._torev(node))
721
734
722 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
735 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
723 """do common cleanups when old nodes are replaced by new nodes
736 """do common cleanups when old nodes are replaced by new nodes
724
737
725 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
738 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
726 (we might also want to move working directory parent in the future)
739 (we might also want to move working directory parent in the future)
727
740
728 By default, bookmark moves are calculated automatically from 'replacements',
741 By default, bookmark moves are calculated automatically from 'replacements',
729 but 'moves' can be used to override that. Also, 'moves' may include
742 but 'moves' can be used to override that. Also, 'moves' may include
730 additional bookmark moves that should not have associated obsmarkers.
743 additional bookmark moves that should not have associated obsmarkers.
731
744
732 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
745 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
733 have replacements. operation is a string, like "rebase".
746 have replacements. operation is a string, like "rebase".
734
747
735 metadata is dictionary containing metadata to be stored in obsmarker if
748 metadata is dictionary containing metadata to be stored in obsmarker if
736 obsolescence is enabled.
749 obsolescence is enabled.
737 """
750 """
738 if not replacements and not moves:
751 if not replacements and not moves:
739 return
752 return
740
753
741 # translate mapping's other forms
754 # translate mapping's other forms
742 if not util.safehasattr(replacements, 'items'):
755 if not util.safehasattr(replacements, 'items'):
743 replacements = {n: () for n in replacements}
756 replacements = {n: () for n in replacements}
744
757
745 # Calculate bookmark movements
758 # Calculate bookmark movements
746 if moves is None:
759 if moves is None:
747 moves = {}
760 moves = {}
748 # Unfiltered repo is needed since nodes in replacements might be hidden.
761 # Unfiltered repo is needed since nodes in replacements might be hidden.
749 unfi = repo.unfiltered()
762 unfi = repo.unfiltered()
750 for oldnode, newnodes in replacements.items():
763 for oldnode, newnodes in replacements.items():
751 if oldnode in moves:
764 if oldnode in moves:
752 continue
765 continue
753 if len(newnodes) > 1:
766 if len(newnodes) > 1:
754 # usually a split, take the one with biggest rev number
767 # usually a split, take the one with biggest rev number
755 newnode = next(unfi.set('max(%ln)', newnodes)).node()
768 newnode = next(unfi.set('max(%ln)', newnodes)).node()
756 elif len(newnodes) == 0:
769 elif len(newnodes) == 0:
757 # move bookmark backwards
770 # move bookmark backwards
758 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
771 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
759 list(replacements)))
772 list(replacements)))
760 if roots:
773 if roots:
761 newnode = roots[0].node()
774 newnode = roots[0].node()
762 else:
775 else:
763 newnode = nullid
776 newnode = nullid
764 else:
777 else:
765 newnode = newnodes[0]
778 newnode = newnodes[0]
766 moves[oldnode] = newnode
779 moves[oldnode] = newnode
767
780
768 with repo.transaction('cleanup') as tr:
781 with repo.transaction('cleanup') as tr:
769 # Move bookmarks
782 # Move bookmarks
770 bmarks = repo._bookmarks
783 bmarks = repo._bookmarks
771 bmarkchanges = []
784 bmarkchanges = []
772 allnewnodes = [n for ns in replacements.values() for n in ns]
785 allnewnodes = [n for ns in replacements.values() for n in ns]
773 for oldnode, newnode in moves.items():
786 for oldnode, newnode in moves.items():
774 oldbmarks = repo.nodebookmarks(oldnode)
787 oldbmarks = repo.nodebookmarks(oldnode)
775 if not oldbmarks:
788 if not oldbmarks:
776 continue
789 continue
777 from . import bookmarks # avoid import cycle
790 from . import bookmarks # avoid import cycle
778 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
791 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
779 (util.rapply(pycompat.maybebytestr, oldbmarks),
792 (util.rapply(pycompat.maybebytestr, oldbmarks),
780 hex(oldnode), hex(newnode)))
793 hex(oldnode), hex(newnode)))
781 # Delete divergent bookmarks being parents of related newnodes
794 # Delete divergent bookmarks being parents of related newnodes
782 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
795 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
783 allnewnodes, newnode, oldnode)
796 allnewnodes, newnode, oldnode)
784 deletenodes = _containsnode(repo, deleterevs)
797 deletenodes = _containsnode(repo, deleterevs)
785 for name in oldbmarks:
798 for name in oldbmarks:
786 bmarkchanges.append((name, newnode))
799 bmarkchanges.append((name, newnode))
787 for b in bookmarks.divergent2delete(repo, deletenodes, name):
800 for b in bookmarks.divergent2delete(repo, deletenodes, name):
788 bmarkchanges.append((b, None))
801 bmarkchanges.append((b, None))
789
802
790 if bmarkchanges:
803 if bmarkchanges:
791 bmarks.applychanges(repo, tr, bmarkchanges)
804 bmarks.applychanges(repo, tr, bmarkchanges)
792
805
793 # Obsolete or strip nodes
806 # Obsolete or strip nodes
794 if obsolete.isenabled(repo, obsolete.createmarkersopt):
807 if obsolete.isenabled(repo, obsolete.createmarkersopt):
795 # If a node is already obsoleted, and we want to obsolete it
808 # If a node is already obsoleted, and we want to obsolete it
796 # without a successor, skip that obssolete request since it's
809 # without a successor, skip that obssolete request since it's
797 # unnecessary. That's the "if s or not isobs(n)" check below.
810 # unnecessary. That's the "if s or not isobs(n)" check below.
798 # Also sort the node in topology order, that might be useful for
811 # Also sort the node in topology order, that might be useful for
799 # some obsstore logic.
812 # some obsstore logic.
800 # NOTE: the filtering and sorting might belong to createmarkers.
813 # NOTE: the filtering and sorting might belong to createmarkers.
801 isobs = unfi.obsstore.successors.__contains__
814 isobs = unfi.obsstore.successors.__contains__
802 torev = unfi.changelog.rev
815 torev = unfi.changelog.rev
803 sortfunc = lambda ns: torev(ns[0])
816 sortfunc = lambda ns: torev(ns[0])
804 rels = [(unfi[n], tuple(unfi[m] for m in s))
817 rels = [(unfi[n], tuple(unfi[m] for m in s))
805 for n, s in sorted(replacements.items(), key=sortfunc)
818 for n, s in sorted(replacements.items(), key=sortfunc)
806 if s or not isobs(n)]
819 if s or not isobs(n)]
807 if rels:
820 if rels:
808 obsolete.createmarkers(repo, rels, operation=operation,
821 obsolete.createmarkers(repo, rels, operation=operation,
809 metadata=metadata)
822 metadata=metadata)
810 else:
823 else:
811 from . import repair # avoid import cycle
824 from . import repair # avoid import cycle
812 tostrip = list(replacements)
825 tostrip = list(replacements)
813 if tostrip:
826 if tostrip:
814 repair.delayedstrip(repo.ui, repo, tostrip, operation)
827 repair.delayedstrip(repo.ui, repo, tostrip, operation)
815
828
816 def addremove(repo, matcher, prefix, opts=None):
829 def addremove(repo, matcher, prefix, opts=None):
817 if opts is None:
830 if opts is None:
818 opts = {}
831 opts = {}
819 m = matcher
832 m = matcher
820 dry_run = opts.get('dry_run')
833 dry_run = opts.get('dry_run')
821 try:
834 try:
822 similarity = float(opts.get('similarity') or 0)
835 similarity = float(opts.get('similarity') or 0)
823 except ValueError:
836 except ValueError:
824 raise error.Abort(_('similarity must be a number'))
837 raise error.Abort(_('similarity must be a number'))
825 if similarity < 0 or similarity > 100:
838 if similarity < 0 or similarity > 100:
826 raise error.Abort(_('similarity must be between 0 and 100'))
839 raise error.Abort(_('similarity must be between 0 and 100'))
827 similarity /= 100.0
840 similarity /= 100.0
828
841
829 ret = 0
842 ret = 0
830 join = lambda f: os.path.join(prefix, f)
843 join = lambda f: os.path.join(prefix, f)
831
844
832 wctx = repo[None]
845 wctx = repo[None]
833 for subpath in sorted(wctx.substate):
846 for subpath in sorted(wctx.substate):
834 submatch = matchmod.subdirmatcher(subpath, m)
847 submatch = matchmod.subdirmatcher(subpath, m)
835 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
848 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
836 sub = wctx.sub(subpath)
849 sub = wctx.sub(subpath)
837 try:
850 try:
838 if sub.addremove(submatch, prefix, opts):
851 if sub.addremove(submatch, prefix, opts):
839 ret = 1
852 ret = 1
840 except error.LookupError:
853 except error.LookupError:
841 repo.ui.status(_("skipping missing subrepository: %s\n")
854 repo.ui.status(_("skipping missing subrepository: %s\n")
842 % join(subpath))
855 % join(subpath))
843
856
844 rejected = []
857 rejected = []
845 def badfn(f, msg):
858 def badfn(f, msg):
846 if f in m.files():
859 if f in m.files():
847 m.bad(f, msg)
860 m.bad(f, msg)
848 rejected.append(f)
861 rejected.append(f)
849
862
850 badmatch = matchmod.badmatch(m, badfn)
863 badmatch = matchmod.badmatch(m, badfn)
851 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
864 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
852 badmatch)
865 badmatch)
853
866
854 unknownset = set(unknown + forgotten)
867 unknownset = set(unknown + forgotten)
855 toprint = unknownset.copy()
868 toprint = unknownset.copy()
856 toprint.update(deleted)
869 toprint.update(deleted)
857 for abs in sorted(toprint):
870 for abs in sorted(toprint):
858 if repo.ui.verbose or not m.exact(abs):
871 if repo.ui.verbose or not m.exact(abs):
859 if abs in unknownset:
872 if abs in unknownset:
860 status = _('adding %s\n') % m.uipath(abs)
873 status = _('adding %s\n') % m.uipath(abs)
861 else:
874 else:
862 status = _('removing %s\n') % m.uipath(abs)
875 status = _('removing %s\n') % m.uipath(abs)
863 repo.ui.status(status)
876 repo.ui.status(status)
864
877
865 renames = _findrenames(repo, m, added + unknown, removed + deleted,
878 renames = _findrenames(repo, m, added + unknown, removed + deleted,
866 similarity)
879 similarity)
867
880
868 if not dry_run:
881 if not dry_run:
869 _markchanges(repo, unknown + forgotten, deleted, renames)
882 _markchanges(repo, unknown + forgotten, deleted, renames)
870
883
871 for f in rejected:
884 for f in rejected:
872 if f in m.files():
885 if f in m.files():
873 return 1
886 return 1
874 return ret
887 return ret
875
888
876 def marktouched(repo, files, similarity=0.0):
889 def marktouched(repo, files, similarity=0.0):
877 '''Assert that files have somehow been operated upon. files are relative to
890 '''Assert that files have somehow been operated upon. files are relative to
878 the repo root.'''
891 the repo root.'''
879 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
892 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
880 rejected = []
893 rejected = []
881
894
882 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
895 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
883
896
884 if repo.ui.verbose:
897 if repo.ui.verbose:
885 unknownset = set(unknown + forgotten)
898 unknownset = set(unknown + forgotten)
886 toprint = unknownset.copy()
899 toprint = unknownset.copy()
887 toprint.update(deleted)
900 toprint.update(deleted)
888 for abs in sorted(toprint):
901 for abs in sorted(toprint):
889 if abs in unknownset:
902 if abs in unknownset:
890 status = _('adding %s\n') % abs
903 status = _('adding %s\n') % abs
891 else:
904 else:
892 status = _('removing %s\n') % abs
905 status = _('removing %s\n') % abs
893 repo.ui.status(status)
906 repo.ui.status(status)
894
907
895 renames = _findrenames(repo, m, added + unknown, removed + deleted,
908 renames = _findrenames(repo, m, added + unknown, removed + deleted,
896 similarity)
909 similarity)
897
910
898 _markchanges(repo, unknown + forgotten, deleted, renames)
911 _markchanges(repo, unknown + forgotten, deleted, renames)
899
912
900 for f in rejected:
913 for f in rejected:
901 if f in m.files():
914 if f in m.files():
902 return 1
915 return 1
903 return 0
916 return 0
904
917
905 def _interestingfiles(repo, matcher):
918 def _interestingfiles(repo, matcher):
906 '''Walk dirstate with matcher, looking for files that addremove would care
919 '''Walk dirstate with matcher, looking for files that addremove would care
907 about.
920 about.
908
921
909 This is different from dirstate.status because it doesn't care about
922 This is different from dirstate.status because it doesn't care about
910 whether files are modified or clean.'''
923 whether files are modified or clean.'''
911 added, unknown, deleted, removed, forgotten = [], [], [], [], []
924 added, unknown, deleted, removed, forgotten = [], [], [], [], []
912 audit_path = pathutil.pathauditor(repo.root, cached=True)
925 audit_path = pathutil.pathauditor(repo.root, cached=True)
913
926
914 ctx = repo[None]
927 ctx = repo[None]
915 dirstate = repo.dirstate
928 dirstate = repo.dirstate
916 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
929 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
917 unknown=True, ignored=False, full=False)
930 unknown=True, ignored=False, full=False)
918 for abs, st in walkresults.iteritems():
931 for abs, st in walkresults.iteritems():
919 dstate = dirstate[abs]
932 dstate = dirstate[abs]
920 if dstate == '?' and audit_path.check(abs):
933 if dstate == '?' and audit_path.check(abs):
921 unknown.append(abs)
934 unknown.append(abs)
922 elif dstate != 'r' and not st:
935 elif dstate != 'r' and not st:
923 deleted.append(abs)
936 deleted.append(abs)
924 elif dstate == 'r' and st:
937 elif dstate == 'r' and st:
925 forgotten.append(abs)
938 forgotten.append(abs)
926 # for finding renames
939 # for finding renames
927 elif dstate == 'r' and not st:
940 elif dstate == 'r' and not st:
928 removed.append(abs)
941 removed.append(abs)
929 elif dstate == 'a':
942 elif dstate == 'a':
930 added.append(abs)
943 added.append(abs)
931
944
932 return added, unknown, deleted, removed, forgotten
945 return added, unknown, deleted, removed, forgotten
933
946
934 def _findrenames(repo, matcher, added, removed, similarity):
947 def _findrenames(repo, matcher, added, removed, similarity):
935 '''Find renames from removed files to added ones.'''
948 '''Find renames from removed files to added ones.'''
936 renames = {}
949 renames = {}
937 if similarity > 0:
950 if similarity > 0:
938 for old, new, score in similar.findrenames(repo, added, removed,
951 for old, new, score in similar.findrenames(repo, added, removed,
939 similarity):
952 similarity):
940 if (repo.ui.verbose or not matcher.exact(old)
953 if (repo.ui.verbose or not matcher.exact(old)
941 or not matcher.exact(new)):
954 or not matcher.exact(new)):
942 repo.ui.status(_('recording removal of %s as rename to %s '
955 repo.ui.status(_('recording removal of %s as rename to %s '
943 '(%d%% similar)\n') %
956 '(%d%% similar)\n') %
944 (matcher.rel(old), matcher.rel(new),
957 (matcher.rel(old), matcher.rel(new),
945 score * 100))
958 score * 100))
946 renames[new] = old
959 renames[new] = old
947 return renames
960 return renames
948
961
949 def _markchanges(repo, unknown, deleted, renames):
962 def _markchanges(repo, unknown, deleted, renames):
950 '''Marks the files in unknown as added, the files in deleted as removed,
963 '''Marks the files in unknown as added, the files in deleted as removed,
951 and the files in renames as copied.'''
964 and the files in renames as copied.'''
952 wctx = repo[None]
965 wctx = repo[None]
953 with repo.wlock():
966 with repo.wlock():
954 wctx.forget(deleted)
967 wctx.forget(deleted)
955 wctx.add(unknown)
968 wctx.add(unknown)
956 for new, old in renames.iteritems():
969 for new, old in renames.iteritems():
957 wctx.copy(old, new)
970 wctx.copy(old, new)
958
971
959 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
972 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
960 """Update the dirstate to reflect the intent of copying src to dst. For
973 """Update the dirstate to reflect the intent of copying src to dst. For
961 different reasons it might not end with dst being marked as copied from src.
974 different reasons it might not end with dst being marked as copied from src.
962 """
975 """
963 origsrc = repo.dirstate.copied(src) or src
976 origsrc = repo.dirstate.copied(src) or src
964 if dst == origsrc: # copying back a copy?
977 if dst == origsrc: # copying back a copy?
965 if repo.dirstate[dst] not in 'mn' and not dryrun:
978 if repo.dirstate[dst] not in 'mn' and not dryrun:
966 repo.dirstate.normallookup(dst)
979 repo.dirstate.normallookup(dst)
967 else:
980 else:
968 if repo.dirstate[origsrc] == 'a' and origsrc == src:
981 if repo.dirstate[origsrc] == 'a' and origsrc == src:
969 if not ui.quiet:
982 if not ui.quiet:
970 ui.warn(_("%s has not been committed yet, so no copy "
983 ui.warn(_("%s has not been committed yet, so no copy "
971 "data will be stored for %s.\n")
984 "data will be stored for %s.\n")
972 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
985 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
973 if repo.dirstate[dst] in '?r' and not dryrun:
986 if repo.dirstate[dst] in '?r' and not dryrun:
974 wctx.add([dst])
987 wctx.add([dst])
975 elif not dryrun:
988 elif not dryrun:
976 wctx.copy(origsrc, dst)
989 wctx.copy(origsrc, dst)
977
990
def readrequires(opener, supported):
    """Read and parse .hg/requires via ``opener`` and validate its entries.

    Returns the set of requirement strings found in the file.  Raises
    ``error.RequirementError`` when the file is corrupt or lists a feature
    that is not in ``supported``.
    """
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # A requirement must be a non-empty token starting with an
        # alphanumeric character; anything else means a damaged file.
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(requirement)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1008 return requirements
996
1009
def writerequires(opener, requirements):
    """Persist ``requirements`` to the .hg/requires file, one entry per
    line, in sorted order."""
    with opener('requires', 'w') as fp:
        for name in sorted(requirements):
            fp.write("%s\n" % name)
1001
1014
class filecachesubentry(object):
    """Stat-based change detection for a single file path.

    Wraps util.cachestat for one file and answers "has this file changed
    since I last looked?".  ``_cacheable`` is a tri-state: True/False once
    known, None while we have not yet been able to stat the file.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # Re-stat only when caching can work at all; otherwise changed()
        # always reports True and a fresh stat would be wasted.
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            # Remember the new stat so subsequent calls compare against it.
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # Returns a util.cachestat for path, or None when the file does not
        # exist; any other OS error is propagated.
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1069
class filecacheentry(object):
    """A bundle of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        # One sub-entry per path; ``stat`` controls whether the files are
        # stat()ed immediately on creation.
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(subentry.changed() for subentry in self._entries)

    def refresh(self):
        for subentry in self._entries:
            subentry.refresh()
1073
1086
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    Invariant maintained below: self.name in obj.__dict__ implies
    self.name in obj._filecache.
    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # Applied as a decorator: remember the wrapped function and the
        # attribute name its result will be cached under.
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # Recompute only when one of the tracked files changed on disk.
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

        obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1152
1165
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # Each record: "<revspec>[ <value>]".
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # Always reap the child / close the stream, even when parsing fails.
        if proc:
            proc.communicate()
        if src:
            src.close()
    # Checked after communicate() so returncode is populated.
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1207
1220
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run ``cmd`` through ui.system() with ``envvar`` set so the child
    process can inherit ``lock``.

    Raises LockInheritanceContractViolation when ``lock`` is None, i.e.
    when the caller does not currently hold the lock.  Returns the exit
    code from ui.system()."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        # The locker token is passed down via the environment; the lock is
        # suspended for the duration of the child process.
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1217
1230
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    # Delegates to _locksub with the currently-held wlock; the child finds
    # the inheritance token in HG_WLOCK_LOCKER.
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
1226
1239
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1233
1246
def gddeltaconfig(ui):
    """Tell whether incoming deltas should be optimised for general delta.

    Reads the experimental ``format.generaldelta`` configuration knob.
    """
    return ui.configbool('format', 'generaldelta')
1239
1252
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file into a key -> value dict.

        When ``firstlinenonkeyval`` is true, the first line is not parsed
        as a key=value pair; it is returned verbatim (minus the trailing
        newline) under the ``__firstline`` key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # keep the first line as-is, but without its trailing '\n'
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # whitespace-only lines carry no pair and are skipped; genuinely
            # malformed lines (no '=') make dict() raise ValueError below
            parsed = dict(line[:-1].split('=', 1)
                          for line in lines if line.strip())
        except ValueError as inst:
            raise error.CorruptedState(str(inst))
        if self.firstlinekey in parsed:
            raise error.CorruptedState(
                _("%r can't be used as a key") % self.firstlinekey)
        result.update(parsed)
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1308
1321
# Transaction names (prefixes) for which the number of changesets obsoleted
# by the transaction is reported on close.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Transaction names (prefixes) for which the range of newly added changesets
# is reported on close.
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1329
1342
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    otr is the transaction object; txnname selects which report callbacks
    apply (matched by prefix against the _report*source lists above).
    """
    def txmatch(sources):
        # True when this transaction's name starts with any listed source.
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # Category names are numbered so the callbacks fire in
        # registration order.
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # Report how many changesets this transaction obsoleted.
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing label, revset name) pairs for instability reporting.
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # Snapshot the counts now so the callback can report only the delta
        # introduced by this transaction.
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1413
1426
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line, space-separated list of short hashes for ``nodes``.

    All nodes are shown when there are at most ``maxnumnodes`` of them or
    when the ui is verbose; otherwise only the first ``maxnumnodes`` appear,
    followed by a count of the remainder."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1419
1432
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads

    Aborts the transaction (error.Abort) when any branch visible in the
    'visible' repoview has more than one head.  ``desc`` is the transaction
    description; strip/repair transactions are exempt."""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
1434
1447
def wrapconvertsink(sink):
    """Hook point letting extensions wrap the sink returned by
    convcmd.convertsink() before it is used, whether or not the convert
    extension was formally loaded.

    The default implementation returns the sink unchanged."""
    return sink
1440
1453
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # Direct access to hidden changesets is opt-in via
    # experimental.directaccess, and only applies to filtered repos.
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        # Collect every symbol in the revset that looks like a hash or a
        # revision number; only those may name hidden changesets.
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1483
1496
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    hidden = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for symbol in symbols:
        # first, see whether the symbol looks like a revision number
        try:
            num = int(symbol)
        except ValueError:
            num = None
        if num is not None and num <= tiprev:
            # plain revision numbers are only honoured when explicitly
            # enabled; either way, no hash lookup is attempted for them
            if allowrevnums and num not in cl:
                hidden.add(num)
            continue

        # otherwise treat the symbol as a (possibly partial) changeset hash
        try:
            node = pmatch(symbol)
        except (error.LookupError, error.WdirUnsupported):
            node = None
        if node is not None:
            rev = unficl.rev(node)
            if rev not in cl:
                hidden.add(rev)

    return hidden
General Comments 0
You need to be logged in to leave comments. Login now