sparse-revlog: also use sparse-revlog config as a general delta trigger...
Boris Feld, r38782:f8cbff21 (stable)
@@ -1,1699 +1,1700 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    pycompat,
    revsetlang,
    similar,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

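# Usage sketch (illustrative only): the status class above behaves like a
# plain 7-tuple with named accessors, so callers can unpack it positionally
# or read it by property.
#
# >>> st = status(['a.txt'], [], [], [], [], [], ['b.txt'])
# >>> st.modified
# ['a.txt']
# >>> st[6] == st.clean
# True
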
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1

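# Usage sketch, assuming a configured ui object is in scope: callcatch()
# converts known failures into exit codes instead of tracebacks.
#
# >>> def run():
# ...     raise error.Abort(b'boom')
# >>> callcatch(ui, run)  # prints "abort: boom" via ui.warn
# -1
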
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

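# Sketch of the int(lbl) trick in checknewlabel() above: a label that parses
# as an integer is rejected because it would shadow a revision number, while
# ordinary names fall through via ValueError.
#
# >>> int('123')  # would be ambiguous with revision 123, so it is rejected
# 123
# >>> int('feature-x')  # doctest: +IGNORE_EXCEPTION_DETAIL
# Traceback (most recent call last):
#     ...
# ValueError: invalid literal for int() with base 10: 'feature-x'
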
class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

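# Sketch of the auditor's core idea using only a plain set: a collision
# exists when the lowercased name is already tracked but the exact name is
# not (e.g. 'README' vs 'readme' on a case-insensitive filesystem).
#
# >>> tracked = {'readme'}
# >>> lowered = {f.lower() for f in tracked}
# >>> 'README'.lower() in lowered and 'README' not in tracked
# True
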
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

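# Standalone sketch of the hashing scheme above: the cache key is the SHA-1
# of the sorted filtered revision numbers serialized as '%d;' chunks, so it
# is order-insensitive but sensitive to the exact set of filtered revs.
#
# >>> import hashlib
# >>> def key(revs):
# ...     s = hashlib.sha1()
# ...     for rev in sorted(revs):
# ...         s.update(b'%d;' % rev)
# ...     return s.digest()
# >>> key([3, 1, 2]) == key([1, 2, 3])
# True
# >>> key([1, 2]) == key([1, 2, 3])
# False
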
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

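# Usage sketch (the path below is hypothetical), assuming a directory tree
# containing nested repositories: walkrepos() yields each directory holding
# a '.hg' subdirectory, plus any mq patch-queue repo under '.hg/patches'.
#
# >>> for repopath in walkrepos(b'/srv/repos', followsym=True):
# ...     print(repopath)
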
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    # Uses unfiltered repo because it's faster when prefix is ambiguous.
    # This matches the shortesthexnodeidprefix() function below.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def shortesthexnodeidprefix(repo, node, minlength=1):
    """Find the shortest unambiguous prefix that matches hexnode."""
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow, so we look for hash collisions in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = repo.unfiltered().changelog

    def isrev(prefix):
        try:
            i = int(prefix)
            # if we are a pure int, then starting with zero will not be
            # confused as a rev; or, obviously, if the int is larger
            # than the value of the tip rev
            if prefix[0:1] == b'0' or i > len(cl):
                return False
            return True
        except ValueError:
            return False

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not isrev(prefix):
                return prefix

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

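# Standalone sketch of the disambiguation rule above: a hex prefix keeps
# being extended until it can no longer be read as a decimal revision number.
#
# >>> def isdecimal(prefix):
# ...     try:
# ...         int(prefix)
# ...         return True
# ...     except ValueError:
# ...         return False
# >>> isdecimal('12')   # ambiguous with revision 12, keep extending
# True
# >>> isdecimal('12f')  # contains a hex-only digit, safe to use
# False
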
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.LookupError if the symbol is an
    ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

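# Usage sketch, assuming an open repo object: revsymbol() accepts exactly one
# symbol, never a revset expression, and isrevsymbol() is its boolean probe.
#
# >>> revsymbol(repo, b'tip')  # changectx for the tip revision
# >>> revsymbol(repo, b'.')    # changectx for the working directory parent
# >>> isrevsymbol(repo, b'no-such-name')
# False
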
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

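# Usage sketch, assuming an open repo object: multiple specs are OR-ed
# together, and bare integers are wrapped as rev(N) before evaluation.
#
# >>> revs = revrange(repo, [b'draft()', 42])  # draft() or rev(42)
# >>> list(revs)  # the smartset iterates as integer revision numbers
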
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

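# Sketch of the default case above: with no ui.origbackuppath configured, the
# backup location is simply the original file path plus a '.orig' suffix.
#
# >>> 'dir/file.txt' + '.orig'
# 'dir/file.txt.orig'
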
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

782 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
782 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
783 fixphase=False, targetphase=None):
783 fixphase=False, targetphase=None):
784 """do common cleanups when old nodes are replaced by new nodes
784 """do common cleanups when old nodes are replaced by new nodes
785
785
786 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
786 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
787 (we might also want to move working directory parent in the future)
787 (we might also want to move working directory parent in the future)
788
788
789 By default, bookmark moves are calculated automatically from 'replacements',
789 By default, bookmark moves are calculated automatically from 'replacements',
790 but 'moves' can be used to override that. Also, 'moves' may include
790 but 'moves' can be used to override that. Also, 'moves' may include
791 additional bookmark moves that should not have associated obsmarkers.
791 additional bookmark moves that should not have associated obsmarkers.
792
792
793 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
793 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
794 have replacements. operation is a string, like "rebase".
794 have replacements. operation is a string, like "rebase".
795
795
796 metadata is dictionary containing metadata to be stored in obsmarker if
796 metadata is dictionary containing metadata to be stored in obsmarker if
797 obsolescence is enabled.
797 obsolescence is enabled.
798 """
798 """
799 assert fixphase or targetphase is None
799 assert fixphase or targetphase is None
800 if not replacements and not moves:
800 if not replacements and not moves:
801 return
801 return
802
802
803 # translate mapping's other forms
803 # translate mapping's other forms
804 if not util.safehasattr(replacements, 'items'):
804 if not util.safehasattr(replacements, 'items'):
805 replacements = {n: () for n in replacements}
805 replacements = {n: () for n in replacements}
806
806
807 # Calculate bookmark movements
807 # Calculate bookmark movements
808 if moves is None:
808 if moves is None:
809 moves = {}
809 moves = {}
810 # Unfiltered repo is needed since nodes in replacements might be hidden.
810 # Unfiltered repo is needed since nodes in replacements might be hidden.
811 unfi = repo.unfiltered()
811 unfi = repo.unfiltered()
812 for oldnode, newnodes in replacements.items():
812 for oldnode, newnodes in replacements.items():
813 if oldnode in moves:
813 if oldnode in moves:
814 continue
814 continue
815 if len(newnodes) > 1:
815 if len(newnodes) > 1:
816 # usually a split, take the one with biggest rev number
816 # usually a split, take the one with biggest rev number
817 newnode = next(unfi.set('max(%ln)', newnodes)).node()
817 newnode = next(unfi.set('max(%ln)', newnodes)).node()
818 elif len(newnodes) == 0:
818 elif len(newnodes) == 0:
819 # move bookmark backwards
819 # move bookmark backwards
820 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
820 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
821 list(replacements)))
821 list(replacements)))
822 if roots:
822 if roots:
823 newnode = roots[0].node()
823 newnode = roots[0].node()
824 else:
824 else:
825 newnode = nullid
825 newnode = nullid
826 else:
826 else:
827 newnode = newnodes[0]
827 newnode = newnodes[0]
828 moves[oldnode] = newnode
828 moves[oldnode] = newnode
829
829
830 allnewnodes = [n for ns in replacements.values() for n in ns]
    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnode, newnodes in replacements.items():
            for newnode in newnodes:
                precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)

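# Illustrative walk-through of the phase computation above (the values are
# hypothetical, not taken from the module): with targetphase=None, a new
# node whose precursor was secret and whose parent is draft gets
# max(secret, draft) and therefore stays secret; a replacement never becomes
# more public than its precursors or its parents.
#
#     # sketch: phases.public, phases.draft, phases.secret == 0, 1, 2
#     oldphase, parentphase = 2, 1       # precursor secret, parent draft
#     newphase = max(oldphase, parentphase)   # == 2, queued in 'toretract'
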
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

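# Example of how the similarity option above is interpreted (the opts dict
# and its value are hypothetical): the user-facing value is a percentage
# and is normalized to a fraction before _findrenames() sees it.
#
#     addremove(repo, matcher, prefix='', opts={'similarity': '75'})
#     # float('75') / 100.0 == 0.75 becomes the rename-detection threshold
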
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

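# For reference, the single-letter states tested above are the standard
# Mercurial dirstate codes:
#
#     'n'  normal (tracked)
#     'a'  added
#     'r'  removed
#     'm'  merged
#     '?'  untracked/unknown
#
# e.g. a file in state '?' that passes the path audit is reported as
# unknown, while a tracked file whose stat result is gone from disk is
# reported as deleted.
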
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

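# Worked example (hypothetical files and numbers): with similarity=0.75, a
# removed 'a.txt' and an added 'b.txt' sharing ~80% of their content come
# back from similar.findrenames() with score == 0.8, so the pair is recorded
# here and later replayed as a copy by _markchanges(), turning the
# add/remove pair into a rename of 'a.txt' to 'b.txt'.
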
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            if not r or not r[0:1].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements

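# A typical .hg/requires file looks like the sketch below (the exact set of
# entries varies with the repository format; this combination is just an
# example):
#
#     dotencode
#     fncache
#     generaldelta
#     revlogv1
#     store
#
# Any entry not in 'supported' aborts with the MissingRequirement hint,
# which keeps older clients from silently misreading newer formats.
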
def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the new
    value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the function
    needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)

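# Minimal usage sketch for filecache (the subclass and decorated class
# below are illustrative, not Mercurial code). A subclass supplies join()
# so relative names resolve against the owner's vfs:
#
#     class myfilecache(filecache):
#         def join(self, obj, fname):
#             return obj.vfs.join(fname)
#
#     class thing(object):
#         def __init__(self, vfs):
#             self.vfs = vfs
#             self._filecache = {}
#         @myfilecache('bookmarks')
#         def bookmarks(self):
#             return parsebookmarks(self.vfs)  # hypothetical parser
#
# thing(vfs).bookmarks re-runs the parse only when the stat data of
# .hg/bookmarks says the file changed; otherwise the cached result wins.
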
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

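# Example configuration consumed by extdatasource (the [extdata] section is
# what the code above reads; the key names, file name, and command are
# hypothetical):
#
#     [extdata]
#     filedata = file:extdata.txt
#     shelldata = shell:grep done extdata.txt
#
# where extdata.txt holds one "<revspec> <value>" record per line, e.g.
# "9de260b1e88e reviewed". extdatasource(repo, 'filedata') then maps each
# locally-known revision number to its value string.
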
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)

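# Typical usage sketch (ui, the topic, and the files sequence are whatever
# the caller has at hand):
#
#     with progress(ui, 'checking', unit='files', total=len(files)) as p:
#         for f in files:
#             p.increment(item=f)
#
# The context manager guarantees complete() runs even on error, clearing
# the progress bar via ui.progress(topic, None).
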
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta')
            or ui.configbool('format', 'sparse-revlog'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

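# Effect of the gdinitconfig() change above, as a sketch: enabling the
# sparse-revlog format now implies a general-delta repository even when
# neither generaldelta knob is set, e.g. with
#
#     [format]
#     sparse-revlog = yes
#
# gdinitconfig(ui) returns True, so 'hg init' creates a generaldelta repo;
# sparse revlogs only make sense on top of general delta.
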
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

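# Usage sketch (the file name, keys, and values are hypothetical):
#
#     skvf = simplekeyvaluefile(repo.vfs, 'state-example')
#     skvf.write({'version': '1', 'state': 'resolved'},
#                firstline='format-marker')
#     skvf.read(firstlinenonkeyval=True)
#     # -> {'__firstline': 'format-marker',
#     #     'version': '1', 'state': 'resolved'}
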
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

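# Registration sketch for an extension (the extension and function names
# are hypothetical; util.hooks collects (source, hook) pairs and calls them
# in order when invoked):
#
#     def _myprefetch(repo, revs, match):
#         # fetch the matched file contents for 'revs' from a remote store
#         pass
#
#     fileprefetchhooks.add('myextension', _myprefetch)
#
# prefetchfiles() then invokes _myprefetch(repo, revs, match) before the
# calling command touches the file data.
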
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            newrevs = tr.changes.get('revs', xrange(0, 0))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev not in newrevs
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

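# Example output (the abbreviated hashes are hypothetical): for six nodes
# and the default maxnumnodes=4, non-verbose output reads
# "1f0dee641bb7 2f0dee641bb8 3f0dee641bb9 4f0dee641bba and 2 others".
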
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                        continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
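# Reading of the revset above: start from the ancestors of the bookmark,
# then trim away history reachable from non-bookmarked heads and from other
# bookmarks. For a hypothetical bookmark 'feature', bookmarkrevs(repo,
# 'feature') thus yields just the changesets that bookmark "owns", its own
# line of development.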