##// END OF EJS Templates
context: handle partial nodeids in revsymbol()...
Martin von Zweigbergk -
r37769:35b34202 default
parent child Browse files
Show More
@@ -1,1538 +1,1543 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 revsetlang,
38 revsetlang,
39 similar,
39 similar,
40 url,
40 url,
41 util,
41 util,
42 vfs,
42 vfs,
43 )
43 )
44
44
45 from .utils import (
45 from .utils import (
46 procutil,
46 procutil,
47 stringutil,
47 stringutil,
48 )
48 )
49
49
50 if pycompat.iswindows:
50 if pycompat.iswindows:
51 from . import scmwindows as scmplatform
51 from . import scmwindows as scmplatform
52 else:
52 else:
53 from . import scmposix as scmplatform
53 from . import scmposix as scmplatform
54
54
55 termsize = scmplatform.termsize
55 termsize = scmplatform.termsize
56
56
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        items = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, items)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
109
109
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> ctx mapping, preferring subpaths from ctx1. The
    # subpaths from ctx2 matter when the .hgsub file has been modified
    # (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            missing.add(subpath)
            del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For anything only in ctx2, yield an empty subrepo based on ctx1 so
    # that status and diff are accurate when they do
    # 'sub.{status|diff}(rev2)'. Otherwise the ctx2 subrepo would be
    # compared against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
134
134
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret (and not extinct): they explain
    # why nothing was exchanged even though changesets exist.
    secret = []
    for node in excluded or []:
        ctx = repo[node]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secret.append(node)

    if secret:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secret))
    else:
        ui.status(_("no changes found\n"))
151
151
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Log the traceback (when ui.traceback is enabled) before the
            # outer handlers turn the exception into a message/exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # NOTE: type(u'') is the Python 2 unicode type; unicode payloads are
        # converted to bytes before the checks below.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is not a failure: exit code 1, not -1.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # The last word of the message is the module that failed to import;
        # give targeted hints for Mercurial's compiled modules.
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like object
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            # URLError-like object
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped to a pager that quit): stay
            # silent, but still return -1 below.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # Not a recognized IOError shape; let it propagate.
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
268
268
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285
285
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
290
290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302
302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames')
    lowered = raw.lower()
    parsed = stringutil.parsebool(raw)
    # Windows always aborts on non-portable names; elsewhere the config
    # value decides.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore'
    if parsed is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
315
315
class casecollisionauditor(object):
    '''Detect case-folding collisions between new filenames and files
    already present in the dirstate, warning or aborting per "abort".'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        self._dirstate = dirstate
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
339
339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    digest = hashlib.sha1()
    for rev in revs:
        digest.update('%d;' % rev)
    return digest.digest()
363
363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; errors while
        # walking subdirectories are silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst and return True if it was not
            # seen before. Comparing stat results (samestat) detects
            # directories already visited through a symlink cycle.
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat there is no way to detect symlink cycles, so
        # following symlinks is disabled entirely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk the symlink target via a fresh recursive call
                        # that shares seen_dirs, so cycles stay detected.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # In-place update (topdown walk): os.walk will only descend into
            # the unseen, non-symlink directories kept here.
            dirs[:] = newdirs
407
407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; report the fake
    # wdir id instead.
    node = ctx.node()
    return wdirid if node is None else node
414
414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has rev() == None; map it to the
    # wdir pseudo-revision so callers can compare numerically.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
422
422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
428
428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full 40-char hash in debug mode, short hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436
436
def resolvepartialhexnodeid(repo, prefix):
    # Uses the unfiltered repo because it's faster when the prefix is
    # ambiguous. This matches the "shortest" template function.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return None
    # Raises if the matched node only exists unfiltered (i.e. is filtered
    # out of the current view).
    repo.changelog.rev(node)
    return node
445
445
def isrevsymbol(repo, symbol):
    """Report whether revsymbol() resolves the given single revision symbol."""
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
452
452
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # 1. Try the symbol as an integer revision number. The '%d'
        # round-trip check rejects strings such as '01' or '+1' that int()
        # would otherwise accept.
        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            # Note: 'and' binds tighter than 'or', so this reads as
            # r < 0 or (r >= l and r != wdirrev).
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # 2. Try as a full 40-character hex node id.
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # 3. look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # 4. Try as a partial hex node id. Matches against the unfiltered
        # changelog (presumably for speed, mirroring
        # resolvepartialhexnodeid above); the rev() call then raises if the
        # matched node is filtered from the current view.
        node = repo.unfiltered().changelog._partialmatch(symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        # 5. Fall back to repo's own symbol lookup.
        return repo[symbol]

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
508
513
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)") % (
            changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden: look it up in the unfiltered repo to see
    # whether it is obsolete, so the message can explain why it is not
    # visible.
    ctx = revsymbol(repo.unfiltered(), changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid
    return error.FilteredRepoLookupError(
        msg, hint=_('use --hidden to access hidden revisions'))
533
538
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve ``revspec`` to a single changectx (the last of the set).

    Falls back to ``default`` when ``revspec`` is empty (but 0 is a valid
    revision number and is not treated as empty)."""
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
542
547
def _pairspec(revspec):
    """Report whether the top level of ``revspec`` is a range expression."""
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in rangeops
546
551
def revpairnodes(repo, revs):
    """Deprecated: like revpair(), but returning nodes instead of contexts."""
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    first, second = revpair(repo, revs)
    return first.node(), second.node()
551
556
def revpair(repo, revs):
    """Resolve user-supplied revsets into a (first, second) pair of contexts.

    With no input, returns (working copy parent, working copy).  When the
    input resolves to a single revision and was not written as a range
    expression, the second element is the working copy context (None key).
    Raises error.Abort on an empty or half-empty range.
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # Pick the endpoints cheaply when the smartset knows its ordering;
    # otherwise fall back to first/last in iteration order.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # With multiple specs collapsing to one rev, re-evaluate each spec
    # individually to detect that one side of the range was empty.
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
581
586
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are wrapped as rev(N) so the revset engine accepts them.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
609
614
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        # merge: both parents always matter
        return ps
    if repo.ui.debugflag:
        # debug mode shows the null parent explicitly
        return [ps[0], repo['null']]
    soleparent = ps[0]
    if soleparent.rev() >= intrev(ctx) - 1:
        # linear history: the immediately preceding rev is implied
        return []
    return ps
625
630
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind (e.g. 're:'): leave untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # glob matched nothing: keep the literal pattern
            expanded.append(kindpat)
    return expanded
644
649
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a single empty-string pattern means "no patterns"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (Windows only; a no-op on posix)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over ``m``, which is only bound further down;
        # the callback is not invoked until after ``m`` exists.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # a match-everything matcher implies no explicit patterns were used
    if m.always():
        pats = []
    return m, pats
669
674
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, discarded = matchandpats(ctx, pats, opts, globbed, default,
                                      badfn=badfn)
    return matcher
674
679
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
678
683
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
682
687
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # plain path: normalize it against the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # pattern: it must match exactly one file in the given revision
    ctx = repo[rev]
    matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if matcher(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
696
701
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    # vfs rooted at the configured backup directory (relative to the repo)
    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a plain file sitting where a parent directory must be created);
        # only the first conflicting ancestor needs removal.
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # a directory occupying the target path itself also conflicts
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
732
737
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        # bind the node->rev translation and the rev membership test once
        self._node2rev = repo.changelog.rev
        self._hasrev = revcontainer.__contains__

    def __contains__(self, node):
        return self._hasrev(self._node2rev(node))
742
747
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms (bare iterable -> {node: ()} mapping)
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # explicit entries in 'moves' take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards to the closest surviving ancestor
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    # Apply bookmark moves and obsmarkers/strip within a single transaction
    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                # (name, None) requests deletion of the divergent bookmark
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence disabled: fall back to stripping the old nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
836
841
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and remove missing ones matched by ``matcher``.

    Recurses into subrepos when requested.  Returns 1 if anything failed
    (a rejected explicit pattern or a failing subrepo), 0 otherwise.
    Honors opts 'dry_run', 'similarity' (0-100, for rename detection) and
    'subrepos'.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # _findrenames expects a 0.0-1.0 ratio
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when --subrepos was given, the subrepo was named
        # explicitly, or a pattern reaches into it
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect explicit patterns that failed to match anything
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # announce files about to be added/removed (implicit matches only,
    # unless verbose)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly-named file that could not be processed is a failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
896
901
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: the badfn lambda closes over ``rejected``, which is bound on the
    # next line; Python's late binding makes this work because the callback
    # only fires later, during the dirstate walk.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # announce what will be added/removed
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a named file the matcher rejected means overall failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
925
930
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    auditor = pathutil.pathauditor(repo.root, cached=True)

    wctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(wctx.substate),
                                unknown=True, ignored=False, full=False)
    for path, st in walkresults.iteritems():
        entry = dirstate[path]
        if entry == '?' and auditor.check(path):
            # untracked and within the repo: candidate for adding
            unknown.append(path)
        elif entry != 'r' and not st:
            # tracked but gone from disk
            deleted.append(path)
        elif entry == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(path)
        # for finding renames
        elif entry == 'r' and not st:
            removed.append(path)
        elif entry == 'a':
            added.append(path)

    return added, unknown, deleted, removed, forgotten
954
959
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        # rename detection disabled
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
969
974
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
979
984
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain: if src is itself a copy, the true
    # origin is what it was copied from
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'm' (merged) / 'n' (normal) entries need no change; otherwise
        # re-register the file without copy info
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added, not committed: no copy data can
            # be recorded for it
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # '?' (untracked) or 'r' (removed) destination: just add it
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
998
1003
999 def readrequires(opener, supported):
1004 def readrequires(opener, supported):
1000 '''Reads and parses .hg/requires and checks if all entries found
1005 '''Reads and parses .hg/requires and checks if all entries found
1001 are in the list of supported features.'''
1006 are in the list of supported features.'''
1002 requirements = set(opener.read("requires").splitlines())
1007 requirements = set(opener.read("requires").splitlines())
1003 missings = []
1008 missings = []
1004 for r in requirements:
1009 for r in requirements:
1005 if r not in supported:
1010 if r not in supported:
1006 if not r or not r[0:1].isalnum():
1011 if not r or not r[0:1].isalnum():
1007 raise error.RequirementError(_(".hg/requires file is corrupt"))
1012 raise error.RequirementError(_(".hg/requires file is corrupt"))
1008 missings.append(r)
1013 missings.append(r)
1009 missings.sort()
1014 missings.sort()
1010 if missings:
1015 if missings:
1011 raise error.RequirementError(
1016 raise error.RequirementError(
1012 _("repository requires features unknown to this Mercurial: %s")
1017 _("repository requires features unknown to this Mercurial: %s")
1013 % " ".join(missings),
1018 % " ".join(missings),
1014 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1019 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1015 " for more information"))
1020 " for more information"))
1016 return requirements
1021 return requirements
1017
1022
1018 def writerequires(opener, requirements):
1023 def writerequires(opener, requirements):
1019 with opener('requires', 'w') as fp:
1024 with opener('requires', 'w') as fp:
1020 for r in sorted(requirements):
1025 for r in sorted(requirements):
1021 fp.write("%s\n" % r)
1026 fp.write("%s\n" % r)
1022
1027
1023 class filecachesubentry(object):
1028 class filecachesubentry(object):
1024 def __init__(self, path, stat):
1029 def __init__(self, path, stat):
1025 self.path = path
1030 self.path = path
1026 self.cachestat = None
1031 self.cachestat = None
1027 self._cacheable = None
1032 self._cacheable = None
1028
1033
1029 if stat:
1034 if stat:
1030 self.cachestat = filecachesubentry.stat(self.path)
1035 self.cachestat = filecachesubentry.stat(self.path)
1031
1036
1032 if self.cachestat:
1037 if self.cachestat:
1033 self._cacheable = self.cachestat.cacheable()
1038 self._cacheable = self.cachestat.cacheable()
1034 else:
1039 else:
1035 # None means we don't know yet
1040 # None means we don't know yet
1036 self._cacheable = None
1041 self._cacheable = None
1037
1042
1038 def refresh(self):
1043 def refresh(self):
1039 if self.cacheable():
1044 if self.cacheable():
1040 self.cachestat = filecachesubentry.stat(self.path)
1045 self.cachestat = filecachesubentry.stat(self.path)
1041
1046
1042 def cacheable(self):
1047 def cacheable(self):
1043 if self._cacheable is not None:
1048 if self._cacheable is not None:
1044 return self._cacheable
1049 return self._cacheable
1045
1050
1046 # we don't know yet, assume it is for now
1051 # we don't know yet, assume it is for now
1047 return True
1052 return True
1048
1053
1049 def changed(self):
1054 def changed(self):
1050 # no point in going further if we can't cache it
1055 # no point in going further if we can't cache it
1051 if not self.cacheable():
1056 if not self.cacheable():
1052 return True
1057 return True
1053
1058
1054 newstat = filecachesubentry.stat(self.path)
1059 newstat = filecachesubentry.stat(self.path)
1055
1060
1056 # we may not know if it's cacheable yet, check again now
1061 # we may not know if it's cacheable yet, check again now
1057 if newstat and self._cacheable is None:
1062 if newstat and self._cacheable is None:
1058 self._cacheable = newstat.cacheable()
1063 self._cacheable = newstat.cacheable()
1059
1064
1060 # check again
1065 # check again
1061 if not self._cacheable:
1066 if not self._cacheable:
1062 return True
1067 return True
1063
1068
1064 if self.cachestat != newstat:
1069 if self.cachestat != newstat:
1065 self.cachestat = newstat
1070 self.cachestat = newstat
1066 return True
1071 return True
1067 else:
1072 else:
1068 return False
1073 return False
1069
1074
1070 @staticmethod
1075 @staticmethod
1071 def stat(path):
1076 def stat(path):
1072 try:
1077 try:
1073 return util.cachestat(path)
1078 return util.cachestat(path)
1074 except OSError as e:
1079 except OSError as e:
1075 if e.errno != errno.ENOENT:
1080 if e.errno != errno.ENOENT:
1076 raise
1081 raise
1077
1082
1078 class filecacheentry(object):
1083 class filecacheentry(object):
1079 def __init__(self, paths, stat=True):
1084 def __init__(self, paths, stat=True):
1080 self._entries = []
1085 self._entries = []
1081 for path in paths:
1086 for path in paths:
1082 self._entries.append(filecachesubentry(path, stat))
1087 self._entries.append(filecachesubentry(path, stat))
1083
1088
1084 def changed(self):
1089 def changed(self):
1085 '''true if any entry has changed'''
1090 '''true if any entry has changed'''
1086 for entry in self._entries:
1091 for entry in self._entries:
1087 if entry.changed():
1092 if entry.changed():
1088 return True
1093 return True
1089 return False
1094 return False
1090
1095
1091 def refresh(self):
1096 def refresh(self):
1092 for entry in self._entries:
1097 for entry in self._entries:
1093 entry.refresh()
1098 entry.refresh()
1094
1099
1095 class filecache(object):
1100 class filecache(object):
1096 '''A property like decorator that tracks files under .hg/ for updates.
1101 '''A property like decorator that tracks files under .hg/ for updates.
1097
1102
1098 Records stat info when called in _filecache.
1103 Records stat info when called in _filecache.
1099
1104
1100 On subsequent calls, compares old stat info with new info, and recreates the
1105 On subsequent calls, compares old stat info with new info, and recreates the
1101 object when any of the files changes, updating the new stat info in
1106 object when any of the files changes, updating the new stat info in
1102 _filecache.
1107 _filecache.
1103
1108
1104 Mercurial either atomic renames or appends for files under .hg,
1109 Mercurial either atomic renames or appends for files under .hg,
1105 so to ensure the cache is reliable we need the filesystem to be able
1110 so to ensure the cache is reliable we need the filesystem to be able
1106 to tell us if a file has been replaced. If it can't, we fallback to
1111 to tell us if a file has been replaced. If it can't, we fallback to
1107 recreating the object on every call (essentially the same behavior as
1112 recreating the object on every call (essentially the same behavior as
1108 propertycache).
1113 propertycache).
1109
1114
1110 '''
1115 '''
1111 def __init__(self, *paths):
1116 def __init__(self, *paths):
1112 self.paths = paths
1117 self.paths = paths
1113
1118
1114 def join(self, obj, fname):
1119 def join(self, obj, fname):
1115 """Used to compute the runtime path of a cached file.
1120 """Used to compute the runtime path of a cached file.
1116
1121
1117 Users should subclass filecache and provide their own version of this
1122 Users should subclass filecache and provide their own version of this
1118 function to call the appropriate join function on 'obj' (an instance
1123 function to call the appropriate join function on 'obj' (an instance
1119 of the class that its member function was decorated).
1124 of the class that its member function was decorated).
1120 """
1125 """
1121 raise NotImplementedError
1126 raise NotImplementedError
1122
1127
1123 def __call__(self, func):
1128 def __call__(self, func):
1124 self.func = func
1129 self.func = func
1125 self.name = func.__name__.encode('ascii')
1130 self.name = func.__name__.encode('ascii')
1126 return self
1131 return self
1127
1132
1128 def __get__(self, obj, type=None):
1133 def __get__(self, obj, type=None):
1129 # if accessed on the class, return the descriptor itself.
1134 # if accessed on the class, return the descriptor itself.
1130 if obj is None:
1135 if obj is None:
1131 return self
1136 return self
1132 # do we need to check if the file changed?
1137 # do we need to check if the file changed?
1133 if self.name in obj.__dict__:
1138 if self.name in obj.__dict__:
1134 assert self.name in obj._filecache, self.name
1139 assert self.name in obj._filecache, self.name
1135 return obj.__dict__[self.name]
1140 return obj.__dict__[self.name]
1136
1141
1137 entry = obj._filecache.get(self.name)
1142 entry = obj._filecache.get(self.name)
1138
1143
1139 if entry:
1144 if entry:
1140 if entry.changed():
1145 if entry.changed():
1141 entry.obj = self.func(obj)
1146 entry.obj = self.func(obj)
1142 else:
1147 else:
1143 paths = [self.join(obj, path) for path in self.paths]
1148 paths = [self.join(obj, path) for path in self.paths]
1144
1149
1145 # We stat -before- creating the object so our cache doesn't lie if
1150 # We stat -before- creating the object so our cache doesn't lie if
1146 # a writer modified between the time we read and stat
1151 # a writer modified between the time we read and stat
1147 entry = filecacheentry(paths, True)
1152 entry = filecacheentry(paths, True)
1148 entry.obj = self.func(obj)
1153 entry.obj = self.func(obj)
1149
1154
1150 obj._filecache[self.name] = entry
1155 obj._filecache[self.name] = entry
1151
1156
1152 obj.__dict__[self.name] = entry.obj
1157 obj.__dict__[self.name] = entry.obj
1153 return entry.obj
1158 return entry.obj
1154
1159
1155 def __set__(self, obj, value):
1160 def __set__(self, obj, value):
1156 if self.name not in obj._filecache:
1161 if self.name not in obj._filecache:
1157 # we add an entry for the missing value because X in __dict__
1162 # we add an entry for the missing value because X in __dict__
1158 # implies X in _filecache
1163 # implies X in _filecache
1159 paths = [self.join(obj, path) for path in self.paths]
1164 paths = [self.join(obj, path) for path in self.paths]
1160 ce = filecacheentry(paths, False)
1165 ce = filecacheentry(paths, False)
1161 obj._filecache[self.name] = ce
1166 obj._filecache[self.name] = ce
1162 else:
1167 else:
1163 ce = obj._filecache[self.name]
1168 ce = obj._filecache[self.name]
1164
1169
1165 ce.obj = value # update cached copy
1170 ce.obj = value # update cached copy
1166 obj.__dict__[self.name] = value # update copy returned by obj.x
1171 obj.__dict__[self.name] = value # update copy returned by obj.x
1167
1172
1168 def __delete__(self, obj):
1173 def __delete__(self, obj):
1169 try:
1174 try:
1170 del obj.__dict__[self.name]
1175 del obj.__dict__[self.name]
1171 except KeyError:
1176 except KeyError:
1172 raise AttributeError(self.name)
1177 raise AttributeError(self.name)
1173
1178
1174 def extdatasource(repo, source):
1179 def extdatasource(repo, source):
1175 """Gather a map of rev -> value dict from the specified source
1180 """Gather a map of rev -> value dict from the specified source
1176
1181
1177 A source spec is treated as a URL, with a special case shell: type
1182 A source spec is treated as a URL, with a special case shell: type
1178 for parsing the output from a shell command.
1183 for parsing the output from a shell command.
1179
1184
1180 The data is parsed as a series of newline-separated records where
1185 The data is parsed as a series of newline-separated records where
1181 each record is a revision specifier optionally followed by a space
1186 each record is a revision specifier optionally followed by a space
1182 and a freeform string value. If the revision is known locally, it
1187 and a freeform string value. If the revision is known locally, it
1183 is converted to a rev, otherwise the record is skipped.
1188 is converted to a rev, otherwise the record is skipped.
1184
1189
1185 Note that both key and value are treated as UTF-8 and converted to
1190 Note that both key and value are treated as UTF-8 and converted to
1186 the local encoding. This allows uniformity between local and
1191 the local encoding. This allows uniformity between local and
1187 remote data sources.
1192 remote data sources.
1188 """
1193 """
1189
1194
1190 spec = repo.ui.config("extdata", source)
1195 spec = repo.ui.config("extdata", source)
1191 if not spec:
1196 if not spec:
1192 raise error.Abort(_("unknown extdata source '%s'") % source)
1197 raise error.Abort(_("unknown extdata source '%s'") % source)
1193
1198
1194 data = {}
1199 data = {}
1195 src = proc = None
1200 src = proc = None
1196 try:
1201 try:
1197 if spec.startswith("shell:"):
1202 if spec.startswith("shell:"):
1198 # external commands should be run relative to the repo root
1203 # external commands should be run relative to the repo root
1199 cmd = spec[6:]
1204 cmd = spec[6:]
1200 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1205 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1201 close_fds=procutil.closefds,
1206 close_fds=procutil.closefds,
1202 stdout=subprocess.PIPE, cwd=repo.root)
1207 stdout=subprocess.PIPE, cwd=repo.root)
1203 src = proc.stdout
1208 src = proc.stdout
1204 else:
1209 else:
1205 # treat as a URL or file
1210 # treat as a URL or file
1206 src = url.open(repo.ui, spec)
1211 src = url.open(repo.ui, spec)
1207 for l in src:
1212 for l in src:
1208 if " " in l:
1213 if " " in l:
1209 k, v = l.strip().split(" ", 1)
1214 k, v = l.strip().split(" ", 1)
1210 else:
1215 else:
1211 k, v = l.strip(), ""
1216 k, v = l.strip(), ""
1212
1217
1213 k = encoding.tolocal(k)
1218 k = encoding.tolocal(k)
1214 try:
1219 try:
1215 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1220 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1216 except (error.LookupError, error.RepoLookupError):
1221 except (error.LookupError, error.RepoLookupError):
1217 pass # we ignore data for nodes that don't exist locally
1222 pass # we ignore data for nodes that don't exist locally
1218 finally:
1223 finally:
1219 if proc:
1224 if proc:
1220 proc.communicate()
1225 proc.communicate()
1221 if src:
1226 if src:
1222 src.close()
1227 src.close()
1223 if proc and proc.returncode != 0:
1228 if proc and proc.returncode != 0:
1224 raise error.Abort(_("extdata command '%s' failed: %s")
1229 raise error.Abort(_("extdata command '%s' failed: %s")
1225 % (cmd, procutil.explainexit(proc.returncode)))
1230 % (cmd, procutil.explainexit(proc.returncode)))
1226
1231
1227 return data
1232 return data
1228
1233
1229 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1234 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1230 if lock is None:
1235 if lock is None:
1231 raise error.LockInheritanceContractViolation(
1236 raise error.LockInheritanceContractViolation(
1232 'lock can only be inherited while held')
1237 'lock can only be inherited while held')
1233 if environ is None:
1238 if environ is None:
1234 environ = {}
1239 environ = {}
1235 with lock.inherit() as locker:
1240 with lock.inherit() as locker:
1236 environ[envvar] = locker
1241 environ[envvar] = locker
1237 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1242 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1238
1243
1239 def wlocksub(repo, cmd, *args, **kwargs):
1244 def wlocksub(repo, cmd, *args, **kwargs):
1240 """run cmd as a subprocess that allows inheriting repo's wlock
1245 """run cmd as a subprocess that allows inheriting repo's wlock
1241
1246
1242 This can only be called while the wlock is held. This takes all the
1247 This can only be called while the wlock is held. This takes all the
1243 arguments that ui.system does, and returns the exit code of the
1248 arguments that ui.system does, and returns the exit code of the
1244 subprocess."""
1249 subprocess."""
1245 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1250 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1246 **kwargs)
1251 **kwargs)
1247
1252
1248 def gdinitconfig(ui):
1253 def gdinitconfig(ui):
1249 """helper function to know if a repo should be created as general delta
1254 """helper function to know if a repo should be created as general delta
1250 """
1255 """
1251 # experimental config: format.generaldelta
1256 # experimental config: format.generaldelta
1252 return (ui.configbool('format', 'generaldelta')
1257 return (ui.configbool('format', 'generaldelta')
1253 or ui.configbool('format', 'usegeneraldelta'))
1258 or ui.configbool('format', 'usegeneraldelta'))
1254
1259
1255 def gddeltaconfig(ui):
1260 def gddeltaconfig(ui):
1256 """helper function to know if incoming delta should be optimised
1261 """helper function to know if incoming delta should be optimised
1257 """
1262 """
1258 # experimental config: format.generaldelta
1263 # experimental config: format.generaldelta
1259 return ui.configbool('format', 'generaldelta')
1264 return ui.configbool('format', 'generaldelta')
1260
1265
1261 class simplekeyvaluefile(object):
1266 class simplekeyvaluefile(object):
1262 """A simple file with key=value lines
1267 """A simple file with key=value lines
1263
1268
1264 Keys must be alphanumerics and start with a letter, values must not
1269 Keys must be alphanumerics and start with a letter, values must not
1265 contain '\n' characters"""
1270 contain '\n' characters"""
1266 firstlinekey = '__firstline'
1271 firstlinekey = '__firstline'
1267
1272
1268 def __init__(self, vfs, path, keys=None):
1273 def __init__(self, vfs, path, keys=None):
1269 self.vfs = vfs
1274 self.vfs = vfs
1270 self.path = path
1275 self.path = path
1271
1276
1272 def read(self, firstlinenonkeyval=False):
1277 def read(self, firstlinenonkeyval=False):
1273 """Read the contents of a simple key-value file
1278 """Read the contents of a simple key-value file
1274
1279
1275 'firstlinenonkeyval' indicates whether the first line of file should
1280 'firstlinenonkeyval' indicates whether the first line of file should
1276 be treated as a key-value pair or reuturned fully under the
1281 be treated as a key-value pair or reuturned fully under the
1277 __firstline key."""
1282 __firstline key."""
1278 lines = self.vfs.readlines(self.path)
1283 lines = self.vfs.readlines(self.path)
1279 d = {}
1284 d = {}
1280 if firstlinenonkeyval:
1285 if firstlinenonkeyval:
1281 if not lines:
1286 if not lines:
1282 e = _("empty simplekeyvalue file")
1287 e = _("empty simplekeyvalue file")
1283 raise error.CorruptedState(e)
1288 raise error.CorruptedState(e)
1284 # we don't want to include '\n' in the __firstline
1289 # we don't want to include '\n' in the __firstline
1285 d[self.firstlinekey] = lines[0][:-1]
1290 d[self.firstlinekey] = lines[0][:-1]
1286 del lines[0]
1291 del lines[0]
1287
1292
1288 try:
1293 try:
1289 # the 'if line.strip()' part prevents us from failing on empty
1294 # the 'if line.strip()' part prevents us from failing on empty
1290 # lines which only contain '\n' therefore are not skipped
1295 # lines which only contain '\n' therefore are not skipped
1291 # by 'if line'
1296 # by 'if line'
1292 updatedict = dict(line[:-1].split('=', 1) for line in lines
1297 updatedict = dict(line[:-1].split('=', 1) for line in lines
1293 if line.strip())
1298 if line.strip())
1294 if self.firstlinekey in updatedict:
1299 if self.firstlinekey in updatedict:
1295 e = _("%r can't be used as a key")
1300 e = _("%r can't be used as a key")
1296 raise error.CorruptedState(e % self.firstlinekey)
1301 raise error.CorruptedState(e % self.firstlinekey)
1297 d.update(updatedict)
1302 d.update(updatedict)
1298 except ValueError as e:
1303 except ValueError as e:
1299 raise error.CorruptedState(str(e))
1304 raise error.CorruptedState(str(e))
1300 return d
1305 return d
1301
1306
1302 def write(self, data, firstline=None):
1307 def write(self, data, firstline=None):
1303 """Write key=>value mapping to a file
1308 """Write key=>value mapping to a file
1304 data is a dict. Keys must be alphanumerical and start with a letter.
1309 data is a dict. Keys must be alphanumerical and start with a letter.
1305 Values must not contain newline characters.
1310 Values must not contain newline characters.
1306
1311
1307 If 'firstline' is not None, it is written to file before
1312 If 'firstline' is not None, it is written to file before
1308 everything else, as it is, not in a key=value form"""
1313 everything else, as it is, not in a key=value form"""
1309 lines = []
1314 lines = []
1310 if firstline is not None:
1315 if firstline is not None:
1311 lines.append('%s\n' % firstline)
1316 lines.append('%s\n' % firstline)
1312
1317
1313 for k, v in data.items():
1318 for k, v in data.items():
1314 if k == self.firstlinekey:
1319 if k == self.firstlinekey:
1315 e = "key name '%s' is reserved" % self.firstlinekey
1320 e = "key name '%s' is reserved" % self.firstlinekey
1316 raise error.ProgrammingError(e)
1321 raise error.ProgrammingError(e)
1317 if not k[0:1].isalpha():
1322 if not k[0:1].isalpha():
1318 e = "keys must start with a letter in a key-value file"
1323 e = "keys must start with a letter in a key-value file"
1319 raise error.ProgrammingError(e)
1324 raise error.ProgrammingError(e)
1320 if not k.isalnum():
1325 if not k.isalnum():
1321 e = "invalid key name in a simple key-value file"
1326 e = "invalid key name in a simple key-value file"
1322 raise error.ProgrammingError(e)
1327 raise error.ProgrammingError(e)
1323 if '\n' in v:
1328 if '\n' in v:
1324 e = "invalid value in a simple key-value file"
1329 e = "invalid value in a simple key-value file"
1325 raise error.ProgrammingError(e)
1330 raise error.ProgrammingError(e)
1326 lines.append("%s=%s\n" % (k, v))
1331 lines.append("%s=%s\n" % (k, v))
1327 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1332 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1328 fp.write(''.join(lines))
1333 fp.write(''.join(lines))
1329
1334
1330 _reportobsoletedsource = [
1335 _reportobsoletedsource = [
1331 'debugobsolete',
1336 'debugobsolete',
1332 'pull',
1337 'pull',
1333 'push',
1338 'push',
1334 'serve',
1339 'serve',
1335 'unbundle',
1340 'unbundle',
1336 ]
1341 ]
1337
1342
1338 _reportnewcssource = [
1343 _reportnewcssource = [
1339 'pull',
1344 'pull',
1340 'unbundle',
1345 'unbundle',
1341 ]
1346 ]
1342
1347
1343 # a list of (repo, ctx, files) functions called by various commands to allow
1348 # a list of (repo, ctx, files) functions called by various commands to allow
1344 # extensions to ensure the corresponding files are available locally, before the
1349 # extensions to ensure the corresponding files are available locally, before the
1345 # command uses them.
1350 # command uses them.
1346 fileprefetchhooks = util.hooks()
1351 fileprefetchhooks = util.hooks()
1347
1352
1348 # A marker that tells the evolve extension to suppress its own reporting
1353 # A marker that tells the evolve extension to suppress its own reporting
1349 _reportstroubledchangesets = True
1354 _reportstroubledchangesets = True
1350
1355
1351 def registersummarycallback(repo, otr, txnname=''):
1356 def registersummarycallback(repo, otr, txnname=''):
1352 """register a callback to issue a summary after the transaction is closed
1357 """register a callback to issue a summary after the transaction is closed
1353 """
1358 """
1354 def txmatch(sources):
1359 def txmatch(sources):
1355 return any(txnname.startswith(source) for source in sources)
1360 return any(txnname.startswith(source) for source in sources)
1356
1361
1357 categories = []
1362 categories = []
1358
1363
1359 def reportsummary(func):
1364 def reportsummary(func):
1360 """decorator for report callbacks."""
1365 """decorator for report callbacks."""
1361 # The repoview life cycle is shorter than the one of the actual
1366 # The repoview life cycle is shorter than the one of the actual
1362 # underlying repository. So the filtered object can die before the
1367 # underlying repository. So the filtered object can die before the
1363 # weakref is used leading to troubles. We keep a reference to the
1368 # weakref is used leading to troubles. We keep a reference to the
1364 # unfiltered object and restore the filtering when retrieving the
1369 # unfiltered object and restore the filtering when retrieving the
1365 # repository through the weakref.
1370 # repository through the weakref.
1366 filtername = repo.filtername
1371 filtername = repo.filtername
1367 reporef = weakref.ref(repo.unfiltered())
1372 reporef = weakref.ref(repo.unfiltered())
1368 def wrapped(tr):
1373 def wrapped(tr):
1369 repo = reporef()
1374 repo = reporef()
1370 if filtername:
1375 if filtername:
1371 repo = repo.filtered(filtername)
1376 repo = repo.filtered(filtername)
1372 func(repo, tr)
1377 func(repo, tr)
1373 newcat = '%02i-txnreport' % len(categories)
1378 newcat = '%02i-txnreport' % len(categories)
1374 otr.addpostclose(newcat, wrapped)
1379 otr.addpostclose(newcat, wrapped)
1375 categories.append(newcat)
1380 categories.append(newcat)
1376 return wrapped
1381 return wrapped
1377
1382
1378 if txmatch(_reportobsoletedsource):
1383 if txmatch(_reportobsoletedsource):
1379 @reportsummary
1384 @reportsummary
1380 def reportobsoleted(repo, tr):
1385 def reportobsoleted(repo, tr):
1381 obsoleted = obsutil.getobsoleted(repo, tr)
1386 obsoleted = obsutil.getobsoleted(repo, tr)
1382 if obsoleted:
1387 if obsoleted:
1383 repo.ui.status(_('obsoleted %i changesets\n')
1388 repo.ui.status(_('obsoleted %i changesets\n')
1384 % len(obsoleted))
1389 % len(obsoleted))
1385
1390
1386 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1391 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1387 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1392 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1388 instabilitytypes = [
1393 instabilitytypes = [
1389 ('orphan', 'orphan'),
1394 ('orphan', 'orphan'),
1390 ('phase-divergent', 'phasedivergent'),
1395 ('phase-divergent', 'phasedivergent'),
1391 ('content-divergent', 'contentdivergent'),
1396 ('content-divergent', 'contentdivergent'),
1392 ]
1397 ]
1393
1398
1394 def getinstabilitycounts(repo):
1399 def getinstabilitycounts(repo):
1395 filtered = repo.changelog.filteredrevs
1400 filtered = repo.changelog.filteredrevs
1396 counts = {}
1401 counts = {}
1397 for instability, revset in instabilitytypes:
1402 for instability, revset in instabilitytypes:
1398 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1403 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1399 filtered)
1404 filtered)
1400 return counts
1405 return counts
1401
1406
1402 oldinstabilitycounts = getinstabilitycounts(repo)
1407 oldinstabilitycounts = getinstabilitycounts(repo)
1403 @reportsummary
1408 @reportsummary
1404 def reportnewinstabilities(repo, tr):
1409 def reportnewinstabilities(repo, tr):
1405 newinstabilitycounts = getinstabilitycounts(repo)
1410 newinstabilitycounts = getinstabilitycounts(repo)
1406 for instability, revset in instabilitytypes:
1411 for instability, revset in instabilitytypes:
1407 delta = (newinstabilitycounts[instability] -
1412 delta = (newinstabilitycounts[instability] -
1408 oldinstabilitycounts[instability])
1413 oldinstabilitycounts[instability])
1409 if delta > 0:
1414 if delta > 0:
1410 repo.ui.warn(_('%i new %s changesets\n') %
1415 repo.ui.warn(_('%i new %s changesets\n') %
1411 (delta, instability))
1416 (delta, instability))
1412
1417
1413 if txmatch(_reportnewcssource):
1418 if txmatch(_reportnewcssource):
1414 @reportsummary
1419 @reportsummary
1415 def reportnewcs(repo, tr):
1420 def reportnewcs(repo, tr):
1416 """Report the range of new revisions pulled/unbundled."""
1421 """Report the range of new revisions pulled/unbundled."""
1417 newrevs = tr.changes.get('revs', xrange(0, 0))
1422 newrevs = tr.changes.get('revs', xrange(0, 0))
1418 if not newrevs:
1423 if not newrevs:
1419 return
1424 return
1420
1425
1421 # Compute the bounds of new revisions' range, excluding obsoletes.
1426 # Compute the bounds of new revisions' range, excluding obsoletes.
1422 unfi = repo.unfiltered()
1427 unfi = repo.unfiltered()
1423 revs = unfi.revs('%ld and not obsolete()', newrevs)
1428 revs = unfi.revs('%ld and not obsolete()', newrevs)
1424 if not revs:
1429 if not revs:
1425 # Got only obsoletes.
1430 # Got only obsoletes.
1426 return
1431 return
1427 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1432 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1428
1433
1429 if minrev == maxrev:
1434 if minrev == maxrev:
1430 revrange = minrev
1435 revrange = minrev
1431 else:
1436 else:
1432 revrange = '%s:%s' % (minrev, maxrev)
1437 revrange = '%s:%s' % (minrev, maxrev)
1433 repo.ui.status(_('new changesets %s\n') % revrange)
1438 repo.ui.status(_('new changesets %s\n') % revrange)
1434
1439
1435 def nodesummaries(repo, nodes, maxnumnodes=4):
1440 def nodesummaries(repo, nodes, maxnumnodes=4):
1436 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1441 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1437 return ' '.join(short(h) for h in nodes)
1442 return ' '.join(short(h) for h in nodes)
1438 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1443 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1439 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1444 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1440
1445
1441 def enforcesinglehead(repo, tr, desc):
1446 def enforcesinglehead(repo, tr, desc):
1442 """check that no named branch has multiple heads"""
1447 """check that no named branch has multiple heads"""
1443 if desc in ('strip', 'repair'):
1448 if desc in ('strip', 'repair'):
1444 # skip the logic during strip
1449 # skip the logic during strip
1445 return
1450 return
1446 visible = repo.filtered('visible')
1451 visible = repo.filtered('visible')
1447 # possible improvement: we could restrict the check to affected branch
1452 # possible improvement: we could restrict the check to affected branch
1448 for name, heads in visible.branchmap().iteritems():
1453 for name, heads in visible.branchmap().iteritems():
1449 if len(heads) > 1:
1454 if len(heads) > 1:
1450 msg = _('rejecting multiple heads on branch "%s"')
1455 msg = _('rejecting multiple heads on branch "%s"')
1451 msg %= name
1456 msg %= name
1452 hint = _('%d heads: %s')
1457 hint = _('%d heads: %s')
1453 hint %= (len(heads), nodesummaries(repo, heads))
1458 hint %= (len(heads), nodesummaries(repo, heads))
1454 raise error.Abort(msg, hint=hint)
1459 raise error.Abort(msg, hint=hint)
1455
1460
1456 def wrapconvertsink(sink):
1461 def wrapconvertsink(sink):
1457 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1462 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1458 before it is used, whether or not the convert extension was formally loaded.
1463 before it is used, whether or not the convert extension was formally loaded.
1459 """
1464 """
1460 return sink
1465 return sink
1461
1466
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access must be explicitly enabled, and only applies on top of
    # the standard visibility filters
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-like symbol appearing in the user-supplied revsets
    hashlike = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        hashlike.update(revsetlang.gethashlikesymbols(parsed))
    if not hashlike:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlike)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[r]) for r in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    # one past the highest valid revision number in the unfiltered repo
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # First interpretation: the symbol is a plain decimal revision
        # number. Only numbers within the repo's range are treated that
        # way; a larger decimal string may still be a valid hex nodeid
        # prefix, so it falls through to the partial-match lookup below.
        try:
            n = int(s)
            if n <= tiprev:
                # NOTE(review): n == tiprev is one past the last valid rev,
                # so it may be added here even though no such changeset
                # exists; downstream set arithmetic appears to tolerate
                # that — confirm before tightening to ``n < tiprev``.
                if not allowrevnums:
                    continue
                else:
                    # hidden iff present unfiltered but absent from the
                    # filtered changelog
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # Second interpretation: the symbol is a (possibly partial) hex
        # nodeid. Unknown/ambiguous prefixes and the working-directory
        # pseudo-node are silently skipped.
        try:
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            # hidden iff visible unfiltered but filtered out of ``cl``
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now