##// END OF EJS Templates
scmutil: move construction of instability count message to separate fn...
Pulkit Goyal -
r38474:1cac2e8c default
parent child Browse files
Show More
@@ -1,1680 +1,1688 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 revsetlang,
38 revsetlang,
39 similar,
39 similar,
40 url,
40 url,
41 util,
41 util,
42 vfs,
42 vfs,
43 )
43 )
44
44
45 from .utils import (
45 from .utils import (
46 procutil,
46 procutil,
47 stringutil,
47 stringutil,
48 )
48 )
49
49
50 if pycompat.iswindows:
50 if pycompat.iswindows:
51 from . import scmwindows as scmplatform
51 from . import scmwindows as scmplatform
52 else:
52 else:
53 from . import scmposix as scmplatform
53 from . import scmposix as scmplatform
54
54
55 termsize = scmplatform.termsize
55 termsize = scmplatform.termsize
56
56
57 class status(tuple):
57 class status(tuple):
58 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
58 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
59 and 'ignored' properties are only relevant to the working copy.
59 and 'ignored' properties are only relevant to the working copy.
60 '''
60 '''
61
61
62 __slots__ = ()
62 __slots__ = ()
63
63
64 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
64 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
65 clean):
65 clean):
66 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
66 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
67 ignored, clean))
67 ignored, clean))
68
68
69 @property
69 @property
70 def modified(self):
70 def modified(self):
71 '''files that have been modified'''
71 '''files that have been modified'''
72 return self[0]
72 return self[0]
73
73
74 @property
74 @property
75 def added(self):
75 def added(self):
76 '''files that have been added'''
76 '''files that have been added'''
77 return self[1]
77 return self[1]
78
78
79 @property
79 @property
80 def removed(self):
80 def removed(self):
81 '''files that have been removed'''
81 '''files that have been removed'''
82 return self[2]
82 return self[2]
83
83
84 @property
84 @property
85 def deleted(self):
85 def deleted(self):
86 '''files that are in the dirstate, but have been deleted from the
86 '''files that are in the dirstate, but have been deleted from the
87 working copy (aka "missing")
87 working copy (aka "missing")
88 '''
88 '''
89 return self[3]
89 return self[3]
90
90
91 @property
91 @property
92 def unknown(self):
92 def unknown(self):
93 '''files not in the dirstate that are not ignored'''
93 '''files not in the dirstate that are not ignored'''
94 return self[4]
94 return self[4]
95
95
96 @property
96 @property
97 def ignored(self):
97 def ignored(self):
98 '''files not in the dirstate that are ignored (by _dirignore())'''
98 '''files not in the dirstate that are ignored (by _dirignore())'''
99 return self[5]
99 return self[5]
100
100
101 @property
101 @property
102 def clean(self):
102 def clean(self):
103 '''files that have not been modified'''
103 '''files that have not been modified'''
104 return self[6]
104 return self[6]
105
105
106 def __repr__(self, *args, **kwargs):
106 def __repr__(self, *args, **kwargs):
107 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
107 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
108 r'unknown=%s, ignored=%s, clean=%s>') %
108 r'unknown=%s, ignored=%s, clean=%s>') %
109 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
109 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
110
110
111 def itersubrepos(ctx1, ctx2):
111 def itersubrepos(ctx1, ctx2):
112 """find subrepos in ctx1 or ctx2"""
112 """find subrepos in ctx1 or ctx2"""
113 # Create a (subpath, ctx) mapping where we prefer subpaths from
113 # Create a (subpath, ctx) mapping where we prefer subpaths from
114 # ctx1. The subpaths from ctx2 are important when the .hgsub file
114 # ctx1. The subpaths from ctx2 are important when the .hgsub file
115 # has been modified (in ctx2) but not yet committed (in ctx1).
115 # has been modified (in ctx2) but not yet committed (in ctx1).
116 subpaths = dict.fromkeys(ctx2.substate, ctx2)
116 subpaths = dict.fromkeys(ctx2.substate, ctx2)
117 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
117 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
118
118
119 missing = set()
119 missing = set()
120
120
121 for subpath in ctx2.substate:
121 for subpath in ctx2.substate:
122 if subpath not in ctx1.substate:
122 if subpath not in ctx1.substate:
123 del subpaths[subpath]
123 del subpaths[subpath]
124 missing.add(subpath)
124 missing.add(subpath)
125
125
126 for subpath, ctx in sorted(subpaths.iteritems()):
126 for subpath, ctx in sorted(subpaths.iteritems()):
127 yield subpath, ctx.sub(subpath)
127 yield subpath, ctx.sub(subpath)
128
128
129 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
129 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
130 # status and diff will have an accurate result when it does
130 # status and diff will have an accurate result when it does
131 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
131 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
132 # against itself.
132 # against itself.
133 for subpath in missing:
133 for subpath in missing:
134 yield subpath, ctx2.nullsub(subpath, ctx1)
134 yield subpath, ctx2.nullsub(subpath, ctx1)
135
135
136 def nochangesfound(ui, repo, excluded=None):
136 def nochangesfound(ui, repo, excluded=None):
137 '''Report no changes for push/pull, excluded is None or a list of
137 '''Report no changes for push/pull, excluded is None or a list of
138 nodes excluded from the push/pull.
138 nodes excluded from the push/pull.
139 '''
139 '''
140 secretlist = []
140 secretlist = []
141 if excluded:
141 if excluded:
142 for n in excluded:
142 for n in excluded:
143 ctx = repo[n]
143 ctx = repo[n]
144 if ctx.phase() >= phases.secret and not ctx.extinct():
144 if ctx.phase() >= phases.secret and not ctx.extinct():
145 secretlist.append(n)
145 secretlist.append(n)
146
146
147 if secretlist:
147 if secretlist:
148 ui.status(_("no changes found (ignored %d secret changesets)\n")
148 ui.status(_("no changes found (ignored %d secret changesets)\n")
149 % len(secretlist))
149 % len(secretlist))
150 else:
150 else:
151 ui.status(_("no changes found\n"))
151 ui.status(_("no changes found\n"))
152
152
153 def callcatch(ui, func):
153 def callcatch(ui, func):
154 """call func() with global exception handling
154 """call func() with global exception handling
155
155
156 return func() if no exception happens. otherwise do some error handling
156 return func() if no exception happens. otherwise do some error handling
157 and return an exit code accordingly. does not handle all exceptions.
157 and return an exit code accordingly. does not handle all exceptions.
158 """
158 """
159 try:
159 try:
160 try:
160 try:
161 return func()
161 return func()
162 except: # re-raises
162 except: # re-raises
163 ui.traceback()
163 ui.traceback()
164 raise
164 raise
165 # Global exception handling, alphabetically
165 # Global exception handling, alphabetically
166 # Mercurial-specific first, followed by built-in and library exceptions
166 # Mercurial-specific first, followed by built-in and library exceptions
167 except error.LockHeld as inst:
167 except error.LockHeld as inst:
168 if inst.errno == errno.ETIMEDOUT:
168 if inst.errno == errno.ETIMEDOUT:
169 reason = _('timed out waiting for lock held by %r') % inst.locker
169 reason = _('timed out waiting for lock held by %r') % inst.locker
170 else:
170 else:
171 reason = _('lock held by %r') % inst.locker
171 reason = _('lock held by %r') % inst.locker
172 ui.warn(_("abort: %s: %s\n")
172 ui.warn(_("abort: %s: %s\n")
173 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
173 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
174 if not inst.locker:
174 if not inst.locker:
175 ui.warn(_("(lock might be very busy)\n"))
175 ui.warn(_("(lock might be very busy)\n"))
176 except error.LockUnavailable as inst:
176 except error.LockUnavailable as inst:
177 ui.warn(_("abort: could not lock %s: %s\n") %
177 ui.warn(_("abort: could not lock %s: %s\n") %
178 (inst.desc or stringutil.forcebytestr(inst.filename),
178 (inst.desc or stringutil.forcebytestr(inst.filename),
179 encoding.strtolocal(inst.strerror)))
179 encoding.strtolocal(inst.strerror)))
180 except error.OutOfBandError as inst:
180 except error.OutOfBandError as inst:
181 if inst.args:
181 if inst.args:
182 msg = _("abort: remote error:\n")
182 msg = _("abort: remote error:\n")
183 else:
183 else:
184 msg = _("abort: remote error\n")
184 msg = _("abort: remote error\n")
185 ui.warn(msg)
185 ui.warn(msg)
186 if inst.args:
186 if inst.args:
187 ui.warn(''.join(inst.args))
187 ui.warn(''.join(inst.args))
188 if inst.hint:
188 if inst.hint:
189 ui.warn('(%s)\n' % inst.hint)
189 ui.warn('(%s)\n' % inst.hint)
190 except error.RepoError as inst:
190 except error.RepoError as inst:
191 ui.warn(_("abort: %s!\n") % inst)
191 ui.warn(_("abort: %s!\n") % inst)
192 if inst.hint:
192 if inst.hint:
193 ui.warn(_("(%s)\n") % inst.hint)
193 ui.warn(_("(%s)\n") % inst.hint)
194 except error.ResponseError as inst:
194 except error.ResponseError as inst:
195 ui.warn(_("abort: %s") % inst.args[0])
195 ui.warn(_("abort: %s") % inst.args[0])
196 msg = inst.args[1]
196 msg = inst.args[1]
197 if isinstance(msg, type(u'')):
197 if isinstance(msg, type(u'')):
198 msg = pycompat.sysbytes(msg)
198 msg = pycompat.sysbytes(msg)
199 if not isinstance(msg, bytes):
199 if not isinstance(msg, bytes):
200 ui.warn(" %r\n" % (msg,))
200 ui.warn(" %r\n" % (msg,))
201 elif not msg:
201 elif not msg:
202 ui.warn(_(" empty string\n"))
202 ui.warn(_(" empty string\n"))
203 else:
203 else:
204 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
204 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
205 except error.CensoredNodeError as inst:
205 except error.CensoredNodeError as inst:
206 ui.warn(_("abort: file censored %s!\n") % inst)
206 ui.warn(_("abort: file censored %s!\n") % inst)
207 except error.RevlogError as inst:
207 except error.RevlogError as inst:
208 ui.warn(_("abort: %s!\n") % inst)
208 ui.warn(_("abort: %s!\n") % inst)
209 except error.InterventionRequired as inst:
209 except error.InterventionRequired as inst:
210 ui.warn("%s\n" % inst)
210 ui.warn("%s\n" % inst)
211 if inst.hint:
211 if inst.hint:
212 ui.warn(_("(%s)\n") % inst.hint)
212 ui.warn(_("(%s)\n") % inst.hint)
213 return 1
213 return 1
214 except error.WdirUnsupported:
214 except error.WdirUnsupported:
215 ui.warn(_("abort: working directory revision cannot be specified\n"))
215 ui.warn(_("abort: working directory revision cannot be specified\n"))
216 except error.Abort as inst:
216 except error.Abort as inst:
217 ui.warn(_("abort: %s\n") % inst)
217 ui.warn(_("abort: %s\n") % inst)
218 if inst.hint:
218 if inst.hint:
219 ui.warn(_("(%s)\n") % inst.hint)
219 ui.warn(_("(%s)\n") % inst.hint)
220 except ImportError as inst:
220 except ImportError as inst:
221 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
221 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
222 m = stringutil.forcebytestr(inst).split()[-1]
222 m = stringutil.forcebytestr(inst).split()[-1]
223 if m in "mpatch bdiff".split():
223 if m in "mpatch bdiff".split():
224 ui.warn(_("(did you forget to compile extensions?)\n"))
224 ui.warn(_("(did you forget to compile extensions?)\n"))
225 elif m in "zlib".split():
225 elif m in "zlib".split():
226 ui.warn(_("(is your Python install correct?)\n"))
226 ui.warn(_("(is your Python install correct?)\n"))
227 except IOError as inst:
227 except IOError as inst:
228 if util.safehasattr(inst, "code"):
228 if util.safehasattr(inst, "code"):
229 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
229 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
230 elif util.safehasattr(inst, "reason"):
230 elif util.safehasattr(inst, "reason"):
231 try: # usually it is in the form (errno, strerror)
231 try: # usually it is in the form (errno, strerror)
232 reason = inst.reason.args[1]
232 reason = inst.reason.args[1]
233 except (AttributeError, IndexError):
233 except (AttributeError, IndexError):
234 # it might be anything, for example a string
234 # it might be anything, for example a string
235 reason = inst.reason
235 reason = inst.reason
236 if isinstance(reason, pycompat.unicode):
236 if isinstance(reason, pycompat.unicode):
237 # SSLError of Python 2.7.9 contains a unicode
237 # SSLError of Python 2.7.9 contains a unicode
238 reason = encoding.unitolocal(reason)
238 reason = encoding.unitolocal(reason)
239 ui.warn(_("abort: error: %s\n") % reason)
239 ui.warn(_("abort: error: %s\n") % reason)
240 elif (util.safehasattr(inst, "args")
240 elif (util.safehasattr(inst, "args")
241 and inst.args and inst.args[0] == errno.EPIPE):
241 and inst.args and inst.args[0] == errno.EPIPE):
242 pass
242 pass
243 elif getattr(inst, "strerror", None):
243 elif getattr(inst, "strerror", None):
244 if getattr(inst, "filename", None):
244 if getattr(inst, "filename", None):
245 ui.warn(_("abort: %s: %s\n") % (
245 ui.warn(_("abort: %s: %s\n") % (
246 encoding.strtolocal(inst.strerror),
246 encoding.strtolocal(inst.strerror),
247 stringutil.forcebytestr(inst.filename)))
247 stringutil.forcebytestr(inst.filename)))
248 else:
248 else:
249 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
249 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
250 else:
250 else:
251 raise
251 raise
252 except OSError as inst:
252 except OSError as inst:
253 if getattr(inst, "filename", None) is not None:
253 if getattr(inst, "filename", None) is not None:
254 ui.warn(_("abort: %s: '%s'\n") % (
254 ui.warn(_("abort: %s: '%s'\n") % (
255 encoding.strtolocal(inst.strerror),
255 encoding.strtolocal(inst.strerror),
256 stringutil.forcebytestr(inst.filename)))
256 stringutil.forcebytestr(inst.filename)))
257 else:
257 else:
258 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
258 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
259 except MemoryError:
259 except MemoryError:
260 ui.warn(_("abort: out of memory\n"))
260 ui.warn(_("abort: out of memory\n"))
261 except SystemExit as inst:
261 except SystemExit as inst:
262 # Commands shouldn't sys.exit directly, but give a return code.
262 # Commands shouldn't sys.exit directly, but give a return code.
263 # Just in case catch this and and pass exit code to caller.
263 # Just in case catch this and and pass exit code to caller.
264 return inst.code
264 return inst.code
265 except socket.error as inst:
265 except socket.error as inst:
266 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
266 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
267
267
268 return -1
268 return -1
269
269
270 def checknewlabel(repo, lbl, kind):
270 def checknewlabel(repo, lbl, kind):
271 # Do not use the "kind" parameter in ui output.
271 # Do not use the "kind" parameter in ui output.
272 # It makes strings difficult to translate.
272 # It makes strings difficult to translate.
273 if lbl in ['tip', '.', 'null']:
273 if lbl in ['tip', '.', 'null']:
274 raise error.Abort(_("the name '%s' is reserved") % lbl)
274 raise error.Abort(_("the name '%s' is reserved") % lbl)
275 for c in (':', '\0', '\n', '\r'):
275 for c in (':', '\0', '\n', '\r'):
276 if c in lbl:
276 if c in lbl:
277 raise error.Abort(
277 raise error.Abort(
278 _("%r cannot be used in a name") % pycompat.bytestr(c))
278 _("%r cannot be used in a name") % pycompat.bytestr(c))
279 try:
279 try:
280 int(lbl)
280 int(lbl)
281 raise error.Abort(_("cannot use an integer as a name"))
281 raise error.Abort(_("cannot use an integer as a name"))
282 except ValueError:
282 except ValueError:
283 pass
283 pass
284 if lbl.strip() != lbl:
284 if lbl.strip() != lbl:
285 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
286
286
287 def checkfilename(f):
287 def checkfilename(f):
288 '''Check that the filename f is an acceptable filename for a tracked file'''
288 '''Check that the filename f is an acceptable filename for a tracked file'''
289 if '\r' in f or '\n' in f:
289 if '\r' in f or '\n' in f:
290 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
290 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
291 % pycompat.bytestr(f))
291 % pycompat.bytestr(f))
292
292
293 def checkportable(ui, f):
293 def checkportable(ui, f):
294 '''Check if filename f is portable and warn or abort depending on config'''
294 '''Check if filename f is portable and warn or abort depending on config'''
295 checkfilename(f)
295 checkfilename(f)
296 abort, warn = checkportabilityalert(ui)
296 abort, warn = checkportabilityalert(ui)
297 if abort or warn:
297 if abort or warn:
298 msg = util.checkwinfilename(f)
298 msg = util.checkwinfilename(f)
299 if msg:
299 if msg:
300 msg = "%s: %s" % (msg, procutil.shellquote(f))
300 msg = "%s: %s" % (msg, procutil.shellquote(f))
301 if abort:
301 if abort:
302 raise error.Abort(msg)
302 raise error.Abort(msg)
303 ui.warn(_("warning: %s\n") % msg)
303 ui.warn(_("warning: %s\n") % msg)
304
304
305 def checkportabilityalert(ui):
305 def checkportabilityalert(ui):
306 '''check if the user's config requests nothing, a warning, or abort for
306 '''check if the user's config requests nothing, a warning, or abort for
307 non-portable filenames'''
307 non-portable filenames'''
308 val = ui.config('ui', 'portablefilenames')
308 val = ui.config('ui', 'portablefilenames')
309 lval = val.lower()
309 lval = val.lower()
310 bval = stringutil.parsebool(val)
310 bval = stringutil.parsebool(val)
311 abort = pycompat.iswindows or lval == 'abort'
311 abort = pycompat.iswindows or lval == 'abort'
312 warn = bval or lval == 'warn'
312 warn = bval or lval == 'warn'
313 if bval is None and not (warn or abort or lval == 'ignore'):
313 if bval is None and not (warn or abort or lval == 'ignore'):
314 raise error.ConfigError(
314 raise error.ConfigError(
315 _("ui.portablefilenames value is invalid ('%s')") % val)
315 _("ui.portablefilenames value is invalid ('%s')") % val)
316 return abort, warn
316 return abort, warn
317
317
318 class casecollisionauditor(object):
318 class casecollisionauditor(object):
319 def __init__(self, ui, abort, dirstate):
319 def __init__(self, ui, abort, dirstate):
320 self._ui = ui
320 self._ui = ui
321 self._abort = abort
321 self._abort = abort
322 allfiles = '\0'.join(dirstate._map)
322 allfiles = '\0'.join(dirstate._map)
323 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
323 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
324 self._dirstate = dirstate
324 self._dirstate = dirstate
325 # The purpose of _newfiles is so that we don't complain about
325 # The purpose of _newfiles is so that we don't complain about
326 # case collisions if someone were to call this object with the
326 # case collisions if someone were to call this object with the
327 # same filename twice.
327 # same filename twice.
328 self._newfiles = set()
328 self._newfiles = set()
329
329
330 def __call__(self, f):
330 def __call__(self, f):
331 if f in self._newfiles:
331 if f in self._newfiles:
332 return
332 return
333 fl = encoding.lower(f)
333 fl = encoding.lower(f)
334 if fl in self._loweredfiles and f not in self._dirstate:
334 if fl in self._loweredfiles and f not in self._dirstate:
335 msg = _('possible case-folding collision for %s') % f
335 msg = _('possible case-folding collision for %s') % f
336 if self._abort:
336 if self._abort:
337 raise error.Abort(msg)
337 raise error.Abort(msg)
338 self._ui.warn(_("warning: %s\n") % msg)
338 self._ui.warn(_("warning: %s\n") % msg)
339 self._loweredfiles.add(fl)
339 self._loweredfiles.add(fl)
340 self._newfiles.add(f)
340 self._newfiles.add(f)
341
341
342 def filteredhash(repo, maxrev):
342 def filteredhash(repo, maxrev):
343 """build hash of filtered revisions in the current repoview.
343 """build hash of filtered revisions in the current repoview.
344
344
345 Multiple caches perform up-to-date validation by checking that the
345 Multiple caches perform up-to-date validation by checking that the
346 tiprev and tipnode stored in the cache file match the current repository.
346 tiprev and tipnode stored in the cache file match the current repository.
347 However, this is not sufficient for validating repoviews because the set
347 However, this is not sufficient for validating repoviews because the set
348 of revisions in the view may change without the repository tiprev and
348 of revisions in the view may change without the repository tiprev and
349 tipnode changing.
349 tipnode changing.
350
350
351 This function hashes all the revs filtered from the view and returns
351 This function hashes all the revs filtered from the view and returns
352 that SHA-1 digest.
352 that SHA-1 digest.
353 """
353 """
354 cl = repo.changelog
354 cl = repo.changelog
355 if not cl.filteredrevs:
355 if not cl.filteredrevs:
356 return None
356 return None
357 key = None
357 key = None
358 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
358 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
359 if revs:
359 if revs:
360 s = hashlib.sha1()
360 s = hashlib.sha1()
361 for rev in revs:
361 for rev in revs:
362 s.update('%d;' % rev)
362 s.update('%d;' % rev)
363 key = s.digest()
363 key = s.digest()
364 return key
364 return key
365
365
366 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
366 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
367 '''yield every hg repository under path, always recursively.
367 '''yield every hg repository under path, always recursively.
368 The recurse flag will only control recursion into repo working dirs'''
368 The recurse flag will only control recursion into repo working dirs'''
369 def errhandler(err):
369 def errhandler(err):
370 if err.filename == path:
370 if err.filename == path:
371 raise err
371 raise err
372 samestat = getattr(os.path, 'samestat', None)
372 samestat = getattr(os.path, 'samestat', None)
373 if followsym and samestat is not None:
373 if followsym and samestat is not None:
374 def adddir(dirlst, dirname):
374 def adddir(dirlst, dirname):
375 dirstat = os.stat(dirname)
375 dirstat = os.stat(dirname)
376 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
376 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
377 if not match:
377 if not match:
378 dirlst.append(dirstat)
378 dirlst.append(dirstat)
379 return not match
379 return not match
380 else:
380 else:
381 followsym = False
381 followsym = False
382
382
383 if (seen_dirs is None) and followsym:
383 if (seen_dirs is None) and followsym:
384 seen_dirs = []
384 seen_dirs = []
385 adddir(seen_dirs, path)
385 adddir(seen_dirs, path)
386 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
386 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
387 dirs.sort()
387 dirs.sort()
388 if '.hg' in dirs:
388 if '.hg' in dirs:
389 yield root # found a repository
389 yield root # found a repository
390 qroot = os.path.join(root, '.hg', 'patches')
390 qroot = os.path.join(root, '.hg', 'patches')
391 if os.path.isdir(os.path.join(qroot, '.hg')):
391 if os.path.isdir(os.path.join(qroot, '.hg')):
392 yield qroot # we have a patch queue repo here
392 yield qroot # we have a patch queue repo here
393 if recurse:
393 if recurse:
394 # avoid recursing inside the .hg directory
394 # avoid recursing inside the .hg directory
395 dirs.remove('.hg')
395 dirs.remove('.hg')
396 else:
396 else:
397 dirs[:] = [] # don't descend further
397 dirs[:] = [] # don't descend further
398 elif followsym:
398 elif followsym:
399 newdirs = []
399 newdirs = []
400 for d in dirs:
400 for d in dirs:
401 fname = os.path.join(root, d)
401 fname = os.path.join(root, d)
402 if adddir(seen_dirs, fname):
402 if adddir(seen_dirs, fname):
403 if os.path.islink(fname):
403 if os.path.islink(fname):
404 for hgname in walkrepos(fname, True, seen_dirs):
404 for hgname in walkrepos(fname, True, seen_dirs):
405 yield hgname
405 yield hgname
406 else:
406 else:
407 newdirs.append(d)
407 newdirs.append(d)
408 dirs[:] = newdirs
408 dirs[:] = newdirs
409
409
410 def binnode(ctx):
410 def binnode(ctx):
411 """Return binary node id for a given basectx"""
411 """Return binary node id for a given basectx"""
412 node = ctx.node()
412 node = ctx.node()
413 if node is None:
413 if node is None:
414 return wdirid
414 return wdirid
415 return node
415 return node
416
416
417 def intrev(ctx):
417 def intrev(ctx):
418 """Return integer for a given basectx that can be used in comparison or
418 """Return integer for a given basectx that can be used in comparison or
419 arithmetic operation"""
419 arithmetic operation"""
420 rev = ctx.rev()
420 rev = ctx.rev()
421 if rev is None:
421 if rev is None:
422 return wdirrev
422 return wdirrev
423 return rev
423 return rev
424
424
425 def formatchangeid(ctx):
425 def formatchangeid(ctx):
426 """Format changectx as '{rev}:{node|formatnode}', which is the default
426 """Format changectx as '{rev}:{node|formatnode}', which is the default
427 template provided by logcmdutil.changesettemplater"""
427 template provided by logcmdutil.changesettemplater"""
428 repo = ctx.repo()
428 repo = ctx.repo()
429 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
429 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
430
430
431 def formatrevnode(ui, rev, node):
431 def formatrevnode(ui, rev, node):
432 """Format given revision and node depending on the current verbosity"""
432 """Format given revision and node depending on the current verbosity"""
433 if ui.debugflag:
433 if ui.debugflag:
434 hexfunc = hex
434 hexfunc = hex
435 else:
435 else:
436 hexfunc = short
436 hexfunc = short
437 return '%d:%s' % (rev, hexfunc(node))
437 return '%d:%s' % (rev, hexfunc(node))
438
438
439 def resolvehexnodeidprefix(repo, prefix):
439 def resolvehexnodeidprefix(repo, prefix):
440 # Uses unfiltered repo because it's faster when prefix is ambiguous/
440 # Uses unfiltered repo because it's faster when prefix is ambiguous/
441 # This matches the shortesthexnodeidprefix() function below.
441 # This matches the shortesthexnodeidprefix() function below.
442 node = repo.unfiltered().changelog._partialmatch(prefix)
442 node = repo.unfiltered().changelog._partialmatch(prefix)
443 if node is None:
443 if node is None:
444 return
444 return
445 repo.changelog.rev(node) # make sure node isn't filtered
445 repo.changelog.rev(node) # make sure node isn't filtered
446 return node
446 return node
447
447
448 def shortesthexnodeidprefix(repo, node, minlength=1):
448 def shortesthexnodeidprefix(repo, node, minlength=1):
449 """Find the shortest unambiguous prefix that matches hexnode."""
449 """Find the shortest unambiguous prefix that matches hexnode."""
450 # _partialmatch() of filtered changelog could take O(len(repo)) time,
450 # _partialmatch() of filtered changelog could take O(len(repo)) time,
451 # which would be unacceptably slow. so we look for hash collision in
451 # which would be unacceptably slow. so we look for hash collision in
452 # unfiltered space, which means some hashes may be slightly longer.
452 # unfiltered space, which means some hashes may be slightly longer.
453 cl = repo.unfiltered().changelog
453 cl = repo.unfiltered().changelog
454
454
455 def isrev(prefix):
455 def isrev(prefix):
456 try:
456 try:
457 i = int(prefix)
457 i = int(prefix)
458 # if we are a pure int, then starting with zero will not be
458 # if we are a pure int, then starting with zero will not be
459 # confused as a rev; or, obviously, if the int is larger
459 # confused as a rev; or, obviously, if the int is larger
460 # than the value of the tip rev
460 # than the value of the tip rev
461 if prefix[0] == '0' or i > len(cl):
461 if prefix[0] == '0' or i > len(cl):
462 return False
462 return False
463 return True
463 return True
464 except ValueError:
464 except ValueError:
465 return False
465 return False
466
466
467 def disambiguate(prefix):
467 def disambiguate(prefix):
468 """Disambiguate against revnums."""
468 """Disambiguate against revnums."""
469 hexnode = hex(node)
469 hexnode = hex(node)
470 for length in range(len(prefix), len(hexnode) + 1):
470 for length in range(len(prefix), len(hexnode) + 1):
471 prefix = hexnode[:length]
471 prefix = hexnode[:length]
472 if not isrev(prefix):
472 if not isrev(prefix):
473 return prefix
473 return prefix
474
474
475 try:
475 try:
476 return disambiguate(cl.shortest(node, minlength))
476 return disambiguate(cl.shortest(node, minlength))
477 except error.LookupError:
477 except error.LookupError:
478 raise error.RepoLookupError()
478 raise error.RepoLookupError()
479
479
480 def isrevsymbol(repo, symbol):
480 def isrevsymbol(repo, symbol):
481 """Checks if a symbol exists in the repo.
481 """Checks if a symbol exists in the repo.
482
482
483 See revsymbol() for details. Raises error.LookupError if the symbol is an
483 See revsymbol() for details. Raises error.LookupError if the symbol is an
484 ambiguous nodeid prefix.
484 ambiguous nodeid prefix.
485 """
485 """
486 try:
486 try:
487 revsymbol(repo, symbol)
487 revsymbol(repo, symbol)
488 return True
488 return True
489 except error.RepoLookupError:
489 except error.RepoLookupError:
490 return False
490 return False
491
491
492 def revsymbol(repo, symbol):
492 def revsymbol(repo, symbol):
493 """Returns a context given a single revision symbol (as string).
493 """Returns a context given a single revision symbol (as string).
494
494
495 This is similar to revsingle(), but accepts only a single revision symbol,
495 This is similar to revsingle(), but accepts only a single revision symbol,
496 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
496 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
497 not "max(public())".
497 not "max(public())".
498 """
498 """
499 if not isinstance(symbol, bytes):
499 if not isinstance(symbol, bytes):
500 msg = ("symbol (%s of type %s) was not a string, did you mean "
500 msg = ("symbol (%s of type %s) was not a string, did you mean "
501 "repo[symbol]?" % (symbol, type(symbol)))
501 "repo[symbol]?" % (symbol, type(symbol)))
502 raise error.ProgrammingError(msg)
502 raise error.ProgrammingError(msg)
503 try:
503 try:
504 if symbol in ('.', 'tip', 'null'):
504 if symbol in ('.', 'tip', 'null'):
505 return repo[symbol]
505 return repo[symbol]
506
506
507 try:
507 try:
508 r = int(symbol)
508 r = int(symbol)
509 if '%d' % r != symbol:
509 if '%d' % r != symbol:
510 raise ValueError
510 raise ValueError
511 l = len(repo.changelog)
511 l = len(repo.changelog)
512 if r < 0:
512 if r < 0:
513 r += l
513 r += l
514 if r < 0 or r >= l and r != wdirrev:
514 if r < 0 or r >= l and r != wdirrev:
515 raise ValueError
515 raise ValueError
516 return repo[r]
516 return repo[r]
517 except error.FilteredIndexError:
517 except error.FilteredIndexError:
518 raise
518 raise
519 except (ValueError, OverflowError, IndexError):
519 except (ValueError, OverflowError, IndexError):
520 pass
520 pass
521
521
522 if len(symbol) == 40:
522 if len(symbol) == 40:
523 try:
523 try:
524 node = bin(symbol)
524 node = bin(symbol)
525 rev = repo.changelog.rev(node)
525 rev = repo.changelog.rev(node)
526 return repo[rev]
526 return repo[rev]
527 except error.FilteredLookupError:
527 except error.FilteredLookupError:
528 raise
528 raise
529 except (TypeError, LookupError):
529 except (TypeError, LookupError):
530 pass
530 pass
531
531
532 # look up bookmarks through the name interface
532 # look up bookmarks through the name interface
533 try:
533 try:
534 node = repo.names.singlenode(repo, symbol)
534 node = repo.names.singlenode(repo, symbol)
535 rev = repo.changelog.rev(node)
535 rev = repo.changelog.rev(node)
536 return repo[rev]
536 return repo[rev]
537 except KeyError:
537 except KeyError:
538 pass
538 pass
539
539
540 node = resolvehexnodeidprefix(repo, symbol)
540 node = resolvehexnodeidprefix(repo, symbol)
541 if node is not None:
541 if node is not None:
542 rev = repo.changelog.rev(node)
542 rev = repo.changelog.rev(node)
543 return repo[rev]
543 return repo[rev]
544
544
545 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
545 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
546
546
547 except error.WdirUnsupported:
547 except error.WdirUnsupported:
548 return repo[None]
548 return repo[None]
549 except (error.FilteredIndexError, error.FilteredLookupError,
549 except (error.FilteredIndexError, error.FilteredLookupError,
550 error.FilteredRepoLookupError):
550 error.FilteredRepoLookupError):
551 raise _filterederror(repo, symbol)
551 raise _filterederror(repo, symbol)
552
552
553 def _filterederror(repo, changeid):
553 def _filterederror(repo, changeid):
554 """build an exception to be raised about a filtered changeid
554 """build an exception to be raised about a filtered changeid
555
555
556 This is extracted in a function to help extensions (eg: evolve) to
556 This is extracted in a function to help extensions (eg: evolve) to
557 experiment with various message variants."""
557 experiment with various message variants."""
558 if repo.filtername.startswith('visible'):
558 if repo.filtername.startswith('visible'):
559
559
560 # Check if the changeset is obsolete
560 # Check if the changeset is obsolete
561 unfilteredrepo = repo.unfiltered()
561 unfilteredrepo = repo.unfiltered()
562 ctx = revsymbol(unfilteredrepo, changeid)
562 ctx = revsymbol(unfilteredrepo, changeid)
563
563
564 # If the changeset is obsolete, enrich the message with the reason
564 # If the changeset is obsolete, enrich the message with the reason
565 # that made this changeset not visible
565 # that made this changeset not visible
566 if ctx.obsolete():
566 if ctx.obsolete():
567 msg = obsutil._getfilteredreason(repo, changeid, ctx)
567 msg = obsutil._getfilteredreason(repo, changeid, ctx)
568 else:
568 else:
569 msg = _("hidden revision '%s'") % changeid
569 msg = _("hidden revision '%s'") % changeid
570
570
571 hint = _('use --hidden to access hidden revisions')
571 hint = _('use --hidden to access hidden revisions')
572
572
573 return error.FilteredRepoLookupError(msg, hint=hint)
573 return error.FilteredRepoLookupError(msg, hint=hint)
574 msg = _("filtered revision '%s' (not in '%s' subset)")
574 msg = _("filtered revision '%s' (not in '%s' subset)")
575 msg %= (changeid, repo.filtername)
575 msg %= (changeid, repo.filtername)
576 return error.FilteredRepoLookupError(msg)
576 return error.FilteredRepoLookupError(msg)
577
577
578 def revsingle(repo, revspec, default='.', localalias=None):
578 def revsingle(repo, revspec, default='.', localalias=None):
579 if not revspec and revspec != 0:
579 if not revspec and revspec != 0:
580 return repo[default]
580 return repo[default]
581
581
582 l = revrange(repo, [revspec], localalias=localalias)
582 l = revrange(repo, [revspec], localalias=localalias)
583 if not l:
583 if not l:
584 raise error.Abort(_('empty revision set'))
584 raise error.Abort(_('empty revision set'))
585 return repo[l.last()]
585 return repo[l.last()]
586
586
587 def _pairspec(revspec):
587 def _pairspec(revspec):
588 tree = revsetlang.parse(revspec)
588 tree = revsetlang.parse(revspec)
589 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
589 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
590
590
591 def revpair(repo, revs):
591 def revpair(repo, revs):
592 if not revs:
592 if not revs:
593 return repo['.'], repo[None]
593 return repo['.'], repo[None]
594
594
595 l = revrange(repo, revs)
595 l = revrange(repo, revs)
596
596
597 if not l:
597 if not l:
598 first = second = None
598 first = second = None
599 elif l.isascending():
599 elif l.isascending():
600 first = l.min()
600 first = l.min()
601 second = l.max()
601 second = l.max()
602 elif l.isdescending():
602 elif l.isdescending():
603 first = l.max()
603 first = l.max()
604 second = l.min()
604 second = l.min()
605 else:
605 else:
606 first = l.first()
606 first = l.first()
607 second = l.last()
607 second = l.last()
608
608
609 if first is None:
609 if first is None:
610 raise error.Abort(_('empty revision range'))
610 raise error.Abort(_('empty revision range'))
611 if (first == second and len(revs) >= 2
611 if (first == second and len(revs) >= 2
612 and not all(revrange(repo, [r]) for r in revs)):
612 and not all(revrange(repo, [r]) for r in revs)):
613 raise error.Abort(_('empty revision on one side of range'))
613 raise error.Abort(_('empty revision on one side of range'))
614
614
615 # if top-level is range expression, the result must always be a pair
615 # if top-level is range expression, the result must always be a pair
616 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
616 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
617 return repo[first], repo[None]
617 return repo[first], repo[None]
618
618
619 return repo[first], repo[second]
619 return repo[first], repo[second]
620
620
621 def revrange(repo, specs, localalias=None):
621 def revrange(repo, specs, localalias=None):
622 """Execute 1 to many revsets and return the union.
622 """Execute 1 to many revsets and return the union.
623
623
624 This is the preferred mechanism for executing revsets using user-specified
624 This is the preferred mechanism for executing revsets using user-specified
625 config options, such as revset aliases.
625 config options, such as revset aliases.
626
626
627 The revsets specified by ``specs`` will be executed via a chained ``OR``
627 The revsets specified by ``specs`` will be executed via a chained ``OR``
628 expression. If ``specs`` is empty, an empty result is returned.
628 expression. If ``specs`` is empty, an empty result is returned.
629
629
630 ``specs`` can contain integers, in which case they are assumed to be
630 ``specs`` can contain integers, in which case they are assumed to be
631 revision numbers.
631 revision numbers.
632
632
633 It is assumed the revsets are already formatted. If you have arguments
633 It is assumed the revsets are already formatted. If you have arguments
634 that need to be expanded in the revset, call ``revsetlang.formatspec()``
634 that need to be expanded in the revset, call ``revsetlang.formatspec()``
635 and pass the result as an element of ``specs``.
635 and pass the result as an element of ``specs``.
636
636
637 Specifying a single revset is allowed.
637 Specifying a single revset is allowed.
638
638
639 Returns a ``revset.abstractsmartset`` which is a list-like interface over
639 Returns a ``revset.abstractsmartset`` which is a list-like interface over
640 integer revisions.
640 integer revisions.
641 """
641 """
642 allspecs = []
642 allspecs = []
643 for spec in specs:
643 for spec in specs:
644 if isinstance(spec, int):
644 if isinstance(spec, int):
645 spec = revsetlang.formatspec('rev(%d)', spec)
645 spec = revsetlang.formatspec('rev(%d)', spec)
646 allspecs.append(spec)
646 allspecs.append(spec)
647 return repo.anyrevs(allspecs, user=True, localalias=localalias)
647 return repo.anyrevs(allspecs, user=True, localalias=localalias)
648
648
649 def meaningfulparents(repo, ctx):
649 def meaningfulparents(repo, ctx):
650 """Return list of meaningful (or all if debug) parentrevs for rev.
650 """Return list of meaningful (or all if debug) parentrevs for rev.
651
651
652 For merges (two non-nullrev revisions) both parents are meaningful.
652 For merges (two non-nullrev revisions) both parents are meaningful.
653 Otherwise the first parent revision is considered meaningful if it
653 Otherwise the first parent revision is considered meaningful if it
654 is not the preceding revision.
654 is not the preceding revision.
655 """
655 """
656 parents = ctx.parents()
656 parents = ctx.parents()
657 if len(parents) > 1:
657 if len(parents) > 1:
658 return parents
658 return parents
659 if repo.ui.debugflag:
659 if repo.ui.debugflag:
660 return [parents[0], repo['null']]
660 return [parents[0], repo['null']]
661 if parents[0].rev() >= intrev(ctx) - 1:
661 if parents[0].rev() >= intrev(ctx) - 1:
662 return []
662 return []
663 return parents
663 return parents
664
664
665 def expandpats(pats):
665 def expandpats(pats):
666 '''Expand bare globs when running on windows.
666 '''Expand bare globs when running on windows.
667 On posix we assume it already has already been done by sh.'''
667 On posix we assume it already has already been done by sh.'''
668 if not util.expandglobs:
668 if not util.expandglobs:
669 return list(pats)
669 return list(pats)
670 ret = []
670 ret = []
671 for kindpat in pats:
671 for kindpat in pats:
672 kind, pat = matchmod._patsplit(kindpat, None)
672 kind, pat = matchmod._patsplit(kindpat, None)
673 if kind is None:
673 if kind is None:
674 try:
674 try:
675 globbed = glob.glob(pat)
675 globbed = glob.glob(pat)
676 except re.error:
676 except re.error:
677 globbed = [pat]
677 globbed = [pat]
678 if globbed:
678 if globbed:
679 ret.extend(globbed)
679 ret.extend(globbed)
680 continue
680 continue
681 ret.append(kindpat)
681 ret.append(kindpat)
682 return ret
682 return ret
683
683
684 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
684 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
685 badfn=None):
685 badfn=None):
686 '''Return a matcher and the patterns that were used.
686 '''Return a matcher and the patterns that were used.
687 The matcher will warn about bad matches, unless an alternate badfn callback
687 The matcher will warn about bad matches, unless an alternate badfn callback
688 is provided.'''
688 is provided.'''
689 if pats == ("",):
689 if pats == ("",):
690 pats = []
690 pats = []
691 if opts is None:
691 if opts is None:
692 opts = {}
692 opts = {}
693 if not globbed and default == 'relpath':
693 if not globbed and default == 'relpath':
694 pats = expandpats(pats or [])
694 pats = expandpats(pats or [])
695
695
696 def bad(f, msg):
696 def bad(f, msg):
697 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
697 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
698
698
699 if badfn is None:
699 if badfn is None:
700 badfn = bad
700 badfn = bad
701
701
702 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
702 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
703 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
703 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
704
704
705 if m.always():
705 if m.always():
706 pats = []
706 pats = []
707 return m, pats
707 return m, pats
708
708
709 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
709 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
710 badfn=None):
710 badfn=None):
711 '''Return a matcher that will warn about bad matches.'''
711 '''Return a matcher that will warn about bad matches.'''
712 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
712 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
713
713
714 def matchall(repo):
714 def matchall(repo):
715 '''Return a matcher that will efficiently match everything.'''
715 '''Return a matcher that will efficiently match everything.'''
716 return matchmod.always(repo.root, repo.getcwd())
716 return matchmod.always(repo.root, repo.getcwd())
717
717
718 def matchfiles(repo, files, badfn=None):
718 def matchfiles(repo, files, badfn=None):
719 '''Return a matcher that will efficiently match exactly these files.'''
719 '''Return a matcher that will efficiently match exactly these files.'''
720 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
720 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
721
721
722 def parsefollowlinespattern(repo, rev, pat, msg):
722 def parsefollowlinespattern(repo, rev, pat, msg):
723 """Return a file name from `pat` pattern suitable for usage in followlines
723 """Return a file name from `pat` pattern suitable for usage in followlines
724 logic.
724 logic.
725 """
725 """
726 if not matchmod.patkind(pat):
726 if not matchmod.patkind(pat):
727 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
727 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
728 else:
728 else:
729 ctx = repo[rev]
729 ctx = repo[rev]
730 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
730 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
731 files = [f for f in ctx if m(f)]
731 files = [f for f in ctx if m(f)]
732 if len(files) != 1:
732 if len(files) != 1:
733 raise error.ParseError(msg)
733 raise error.ParseError(msg)
734 return files[0]
734 return files[0]
735
735
736 def origpath(ui, repo, filepath):
736 def origpath(ui, repo, filepath):
737 '''customize where .orig files are created
737 '''customize where .orig files are created
738
738
739 Fetch user defined path from config file: [ui] origbackuppath = <path>
739 Fetch user defined path from config file: [ui] origbackuppath = <path>
740 Fall back to default (filepath with .orig suffix) if not specified
740 Fall back to default (filepath with .orig suffix) if not specified
741 '''
741 '''
742 origbackuppath = ui.config('ui', 'origbackuppath')
742 origbackuppath = ui.config('ui', 'origbackuppath')
743 if not origbackuppath:
743 if not origbackuppath:
744 return filepath + ".orig"
744 return filepath + ".orig"
745
745
746 # Convert filepath from an absolute path into a path inside the repo.
746 # Convert filepath from an absolute path into a path inside the repo.
747 filepathfromroot = util.normpath(os.path.relpath(filepath,
747 filepathfromroot = util.normpath(os.path.relpath(filepath,
748 start=repo.root))
748 start=repo.root))
749
749
750 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
750 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
751 origbackupdir = origvfs.dirname(filepathfromroot)
751 origbackupdir = origvfs.dirname(filepathfromroot)
752 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
752 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
753 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
753 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
754
754
755 # Remove any files that conflict with the backup file's path
755 # Remove any files that conflict with the backup file's path
756 for f in reversed(list(util.finddirs(filepathfromroot))):
756 for f in reversed(list(util.finddirs(filepathfromroot))):
757 if origvfs.isfileorlink(f):
757 if origvfs.isfileorlink(f):
758 ui.note(_('removing conflicting file: %s\n')
758 ui.note(_('removing conflicting file: %s\n')
759 % origvfs.join(f))
759 % origvfs.join(f))
760 origvfs.unlink(f)
760 origvfs.unlink(f)
761 break
761 break
762
762
763 origvfs.makedirs(origbackupdir)
763 origvfs.makedirs(origbackupdir)
764
764
765 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
765 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
766 ui.note(_('removing conflicting directory: %s\n')
766 ui.note(_('removing conflicting directory: %s\n')
767 % origvfs.join(filepathfromroot))
767 % origvfs.join(filepathfromroot))
768 origvfs.rmtree(filepathfromroot, forcibly=True)
768 origvfs.rmtree(filepathfromroot, forcibly=True)
769
769
770 return origvfs.join(filepathfromroot)
770 return origvfs.join(filepathfromroot)
771
771
772 class _containsnode(object):
772 class _containsnode(object):
773 """proxy __contains__(node) to container.__contains__ which accepts revs"""
773 """proxy __contains__(node) to container.__contains__ which accepts revs"""
774
774
775 def __init__(self, repo, revcontainer):
775 def __init__(self, repo, revcontainer):
776 self._torev = repo.changelog.rev
776 self._torev = repo.changelog.rev
777 self._revcontains = revcontainer.__contains__
777 self._revcontains = revcontainer.__contains__
778
778
779 def __contains__(self, node):
779 def __contains__(self, node):
780 return self._revcontains(self._torev(node))
780 return self._revcontains(self._torev(node))
781
781
782 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
782 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
783 fixphase=False, targetphase=None):
783 fixphase=False, targetphase=None):
784 """do common cleanups when old nodes are replaced by new nodes
784 """do common cleanups when old nodes are replaced by new nodes
785
785
786 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
786 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
787 (we might also want to move working directory parent in the future)
787 (we might also want to move working directory parent in the future)
788
788
789 By default, bookmark moves are calculated automatically from 'replacements',
789 By default, bookmark moves are calculated automatically from 'replacements',
790 but 'moves' can be used to override that. Also, 'moves' may include
790 but 'moves' can be used to override that. Also, 'moves' may include
791 additional bookmark moves that should not have associated obsmarkers.
791 additional bookmark moves that should not have associated obsmarkers.
792
792
793 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
793 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
794 have replacements. operation is a string, like "rebase".
794 have replacements. operation is a string, like "rebase".
795
795
796 metadata is dictionary containing metadata to be stored in obsmarker if
796 metadata is dictionary containing metadata to be stored in obsmarker if
797 obsolescence is enabled.
797 obsolescence is enabled.
798 """
798 """
799 assert fixphase or targetphase is None
799 assert fixphase or targetphase is None
800 if not replacements and not moves:
800 if not replacements and not moves:
801 return
801 return
802
802
803 # translate mapping's other forms
803 # translate mapping's other forms
804 if not util.safehasattr(replacements, 'items'):
804 if not util.safehasattr(replacements, 'items'):
805 replacements = {n: () for n in replacements}
805 replacements = {n: () for n in replacements}
806
806
807 # Calculate bookmark movements
807 # Calculate bookmark movements
808 if moves is None:
808 if moves is None:
809 moves = {}
809 moves = {}
810 # Unfiltered repo is needed since nodes in replacements might be hidden.
810 # Unfiltered repo is needed since nodes in replacements might be hidden.
811 unfi = repo.unfiltered()
811 unfi = repo.unfiltered()
812 for oldnode, newnodes in replacements.items():
812 for oldnode, newnodes in replacements.items():
813 if oldnode in moves:
813 if oldnode in moves:
814 continue
814 continue
815 if len(newnodes) > 1:
815 if len(newnodes) > 1:
816 # usually a split, take the one with biggest rev number
816 # usually a split, take the one with biggest rev number
817 newnode = next(unfi.set('max(%ln)', newnodes)).node()
817 newnode = next(unfi.set('max(%ln)', newnodes)).node()
818 elif len(newnodes) == 0:
818 elif len(newnodes) == 0:
819 # move bookmark backwards
819 # move bookmark backwards
820 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
820 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
821 list(replacements)))
821 list(replacements)))
822 if roots:
822 if roots:
823 newnode = roots[0].node()
823 newnode = roots[0].node()
824 else:
824 else:
825 newnode = nullid
825 newnode = nullid
826 else:
826 else:
827 newnode = newnodes[0]
827 newnode = newnodes[0]
828 moves[oldnode] = newnode
828 moves[oldnode] = newnode
829
829
830 allnewnodes = [n for ns in replacements.values() for n in ns]
830 allnewnodes = [n for ns in replacements.values() for n in ns]
831 toretract = {}
831 toretract = {}
832 toadvance = {}
832 toadvance = {}
833 if fixphase:
833 if fixphase:
834 precursors = {}
834 precursors = {}
835 for oldnode, newnodes in replacements.items():
835 for oldnode, newnodes in replacements.items():
836 for newnode in newnodes:
836 for newnode in newnodes:
837 precursors.setdefault(newnode, []).append(oldnode)
837 precursors.setdefault(newnode, []).append(oldnode)
838
838
839 allnewnodes.sort(key=lambda n: unfi[n].rev())
839 allnewnodes.sort(key=lambda n: unfi[n].rev())
840 newphases = {}
840 newphases = {}
841 def phase(ctx):
841 def phase(ctx):
842 return newphases.get(ctx.node(), ctx.phase())
842 return newphases.get(ctx.node(), ctx.phase())
843 for newnode in allnewnodes:
843 for newnode in allnewnodes:
844 ctx = unfi[newnode]
844 ctx = unfi[newnode]
845 parentphase = max(phase(p) for p in ctx.parents())
845 parentphase = max(phase(p) for p in ctx.parents())
846 if targetphase is None:
846 if targetphase is None:
847 oldphase = max(unfi[oldnode].phase()
847 oldphase = max(unfi[oldnode].phase()
848 for oldnode in precursors[newnode])
848 for oldnode in precursors[newnode])
849 newphase = max(oldphase, parentphase)
849 newphase = max(oldphase, parentphase)
850 else:
850 else:
851 newphase = max(targetphase, parentphase)
851 newphase = max(targetphase, parentphase)
852 newphases[newnode] = newphase
852 newphases[newnode] = newphase
853 if newphase > ctx.phase():
853 if newphase > ctx.phase():
854 toretract.setdefault(newphase, []).append(newnode)
854 toretract.setdefault(newphase, []).append(newnode)
855 elif newphase < ctx.phase():
855 elif newphase < ctx.phase():
856 toadvance.setdefault(newphase, []).append(newnode)
856 toadvance.setdefault(newphase, []).append(newnode)
857
857
858 with repo.transaction('cleanup') as tr:
858 with repo.transaction('cleanup') as tr:
859 # Move bookmarks
859 # Move bookmarks
860 bmarks = repo._bookmarks
860 bmarks = repo._bookmarks
861 bmarkchanges = []
861 bmarkchanges = []
862 for oldnode, newnode in moves.items():
862 for oldnode, newnode in moves.items():
863 oldbmarks = repo.nodebookmarks(oldnode)
863 oldbmarks = repo.nodebookmarks(oldnode)
864 if not oldbmarks:
864 if not oldbmarks:
865 continue
865 continue
866 from . import bookmarks # avoid import cycle
866 from . import bookmarks # avoid import cycle
867 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
867 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
868 (util.rapply(pycompat.maybebytestr, oldbmarks),
868 (util.rapply(pycompat.maybebytestr, oldbmarks),
869 hex(oldnode), hex(newnode)))
869 hex(oldnode), hex(newnode)))
870 # Delete divergent bookmarks being parents of related newnodes
870 # Delete divergent bookmarks being parents of related newnodes
871 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
871 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
872 allnewnodes, newnode, oldnode)
872 allnewnodes, newnode, oldnode)
873 deletenodes = _containsnode(repo, deleterevs)
873 deletenodes = _containsnode(repo, deleterevs)
874 for name in oldbmarks:
874 for name in oldbmarks:
875 bmarkchanges.append((name, newnode))
875 bmarkchanges.append((name, newnode))
876 for b in bookmarks.divergent2delete(repo, deletenodes, name):
876 for b in bookmarks.divergent2delete(repo, deletenodes, name):
877 bmarkchanges.append((b, None))
877 bmarkchanges.append((b, None))
878
878
879 if bmarkchanges:
879 if bmarkchanges:
880 bmarks.applychanges(repo, tr, bmarkchanges)
880 bmarks.applychanges(repo, tr, bmarkchanges)
881
881
882 for phase, nodes in toretract.items():
882 for phase, nodes in toretract.items():
883 phases.retractboundary(repo, tr, phase, nodes)
883 phases.retractboundary(repo, tr, phase, nodes)
884 for phase, nodes in toadvance.items():
884 for phase, nodes in toadvance.items():
885 phases.advanceboundary(repo, tr, phase, nodes)
885 phases.advanceboundary(repo, tr, phase, nodes)
886
886
887 # Obsolete or strip nodes
887 # Obsolete or strip nodes
888 if obsolete.isenabled(repo, obsolete.createmarkersopt):
888 if obsolete.isenabled(repo, obsolete.createmarkersopt):
889 # If a node is already obsoleted, and we want to obsolete it
889 # If a node is already obsoleted, and we want to obsolete it
890 # without a successor, skip that obssolete request since it's
890 # without a successor, skip that obssolete request since it's
891 # unnecessary. That's the "if s or not isobs(n)" check below.
891 # unnecessary. That's the "if s or not isobs(n)" check below.
892 # Also sort the node in topology order, that might be useful for
892 # Also sort the node in topology order, that might be useful for
893 # some obsstore logic.
893 # some obsstore logic.
894 # NOTE: the filtering and sorting might belong to createmarkers.
894 # NOTE: the filtering and sorting might belong to createmarkers.
895 isobs = unfi.obsstore.successors.__contains__
895 isobs = unfi.obsstore.successors.__contains__
896 torev = unfi.changelog.rev
896 torev = unfi.changelog.rev
897 sortfunc = lambda ns: torev(ns[0])
897 sortfunc = lambda ns: torev(ns[0])
898 rels = [(unfi[n], tuple(unfi[m] for m in s))
898 rels = [(unfi[n], tuple(unfi[m] for m in s))
899 for n, s in sorted(replacements.items(), key=sortfunc)
899 for n, s in sorted(replacements.items(), key=sortfunc)
900 if s or not isobs(n)]
900 if s or not isobs(n)]
901 if rels:
901 if rels:
902 obsolete.createmarkers(repo, rels, operation=operation,
902 obsolete.createmarkers(repo, rels, operation=operation,
903 metadata=metadata)
903 metadata=metadata)
904 else:
904 else:
905 from . import repair # avoid import cycle
905 from . import repair # avoid import cycle
906 tostrip = list(replacements)
906 tostrip = list(replacements)
907 if tostrip:
907 if tostrip:
908 repair.delayedstrip(repo.ui, repo, tostrip, operation)
908 repair.delayedstrip(repo.ui, repo, tostrip, operation)
909
909
910 def addremove(repo, matcher, prefix, opts=None):
910 def addremove(repo, matcher, prefix, opts=None):
911 if opts is None:
911 if opts is None:
912 opts = {}
912 opts = {}
913 m = matcher
913 m = matcher
914 dry_run = opts.get('dry_run')
914 dry_run = opts.get('dry_run')
915 try:
915 try:
916 similarity = float(opts.get('similarity') or 0)
916 similarity = float(opts.get('similarity') or 0)
917 except ValueError:
917 except ValueError:
918 raise error.Abort(_('similarity must be a number'))
918 raise error.Abort(_('similarity must be a number'))
919 if similarity < 0 or similarity > 100:
919 if similarity < 0 or similarity > 100:
920 raise error.Abort(_('similarity must be between 0 and 100'))
920 raise error.Abort(_('similarity must be between 0 and 100'))
921 similarity /= 100.0
921 similarity /= 100.0
922
922
923 ret = 0
923 ret = 0
924 join = lambda f: os.path.join(prefix, f)
924 join = lambda f: os.path.join(prefix, f)
925
925
926 wctx = repo[None]
926 wctx = repo[None]
927 for subpath in sorted(wctx.substate):
927 for subpath in sorted(wctx.substate):
928 submatch = matchmod.subdirmatcher(subpath, m)
928 submatch = matchmod.subdirmatcher(subpath, m)
929 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
929 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
930 sub = wctx.sub(subpath)
930 sub = wctx.sub(subpath)
931 try:
931 try:
932 if sub.addremove(submatch, prefix, opts):
932 if sub.addremove(submatch, prefix, opts):
933 ret = 1
933 ret = 1
934 except error.LookupError:
934 except error.LookupError:
935 repo.ui.status(_("skipping missing subrepository: %s\n")
935 repo.ui.status(_("skipping missing subrepository: %s\n")
936 % join(subpath))
936 % join(subpath))
937
937
938 rejected = []
938 rejected = []
939 def badfn(f, msg):
939 def badfn(f, msg):
940 if f in m.files():
940 if f in m.files():
941 m.bad(f, msg)
941 m.bad(f, msg)
942 rejected.append(f)
942 rejected.append(f)
943
943
944 badmatch = matchmod.badmatch(m, badfn)
944 badmatch = matchmod.badmatch(m, badfn)
945 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
945 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
946 badmatch)
946 badmatch)
947
947
948 unknownset = set(unknown + forgotten)
948 unknownset = set(unknown + forgotten)
949 toprint = unknownset.copy()
949 toprint = unknownset.copy()
950 toprint.update(deleted)
950 toprint.update(deleted)
951 for abs in sorted(toprint):
951 for abs in sorted(toprint):
952 if repo.ui.verbose or not m.exact(abs):
952 if repo.ui.verbose or not m.exact(abs):
953 if abs in unknownset:
953 if abs in unknownset:
954 status = _('adding %s\n') % m.uipath(abs)
954 status = _('adding %s\n') % m.uipath(abs)
955 else:
955 else:
956 status = _('removing %s\n') % m.uipath(abs)
956 status = _('removing %s\n') % m.uipath(abs)
957 repo.ui.status(status)
957 repo.ui.status(status)
958
958
959 renames = _findrenames(repo, m, added + unknown, removed + deleted,
959 renames = _findrenames(repo, m, added + unknown, removed + deleted,
960 similarity)
960 similarity)
961
961
962 if not dry_run:
962 if not dry_run:
963 _markchanges(repo, unknown + forgotten, deleted, renames)
963 _markchanges(repo, unknown + forgotten, deleted, renames)
964
964
965 for f in rejected:
965 for f in rejected:
966 if f in m.files():
966 if f in m.files():
967 return 1
967 return 1
968 return ret
968 return ret
969
969
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Classifies the named files via the dirstate, reports additions and
    removals when verbose, records likely renames above the similarity
    threshold, and marks the resulting changes. Returns 1 if the matcher
    rejected a file that was explicitly listed, 0 otherwise.'''
    rejected = []
    # collect paths the matcher refuses instead of aborting on them
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        # report everything that is either newly tracked or gone
        for abs in sorted(unknownset | set(deleted)):
            if abs in unknownset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file that the caller explicitly asked for is an error
    if any(f in m.files() for f in rejected):
        return 1
    return 0
998
998
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    Unlike dirstate.status this ignores the modified/clean distinction; it
    only classifies paths. Returns five lists: added, unknown, deleted,
    removed, forgotten — in that order.'''
    added = []
    unknown = []
    deleted = []
    removed = []
    forgotten = []
    auditor = pathutil.pathauditor(repo.root, cached=True)

    wctx = repo[None]
    ds = repo.dirstate
    walkresults = ds.walk(matcher, subrepos=sorted(wctx.substate),
                          unknown=True, ignored=False, full=False)
    for path, st in walkresults.iteritems():
        state = ds[path]
        if state == '?' and auditor.check(path):
            # untracked and within the repo (not e.g. inside a nested repo)
            unknown.append(path)
        elif state != 'r' and not st:
            # tracked but missing from disk
            deleted.append(path)
        elif state == 'r' and st:
            # marked removed yet present on disk
            forgotten.append(path)
        elif state == 'r' and not st:
            # kept separately so rename detection can pair these with adds
            removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed, forgotten
1027
1027
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping each new name to the old name it most likely
    came from. Detection only runs when similarity is positive.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # stay quiet about exactly-named pairs unless the user wants detail
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
1042
1042
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    ctx = repo[None]
    # all dirstate mutations happen under a single wlock
    with repo.wlock():
        ctx.forget(deleted)
        ctx.add(unknown)
        for dst, src in renames.iteritems():
            ctx.copy(src, dst)
1052
1052
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    ds = repo.dirstate
    origsrc = ds.copied(src) or src
    if dst == origsrc:
        # copying back a copy: drop the copy record by re-marking dst normal
        if ds[dst] not in 'mn' and not dryrun:
            ds.normallookup(dst)
        return
    if ds[origsrc] == 'a' and origsrc == src:
        # the source is merely added: there is no committed data to copy from
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if ds[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1071
1071
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements read from the file. Raises
    RequirementError when the file is malformed or names a feature this
    Mercurial does not know about.'''
    requirements = set(opener.read("requires").splitlines())
    # an unsupported entry that is empty or starts with a non-alphanumeric
    # character indicates file corruption rather than a missing feature
    for r in requirements:
        if r not in supported and (not r or not r[0:1].isalnum()):
            raise error.RequirementError(_(".hg/requires file is corrupt"))
    missings = sorted(r for r in requirements if r not in supported)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1090
1090
def writerequires(opener, requirements):
    """Write the given requirement names, one per line and sorted, to the
    .hg/requires file opened through 'opener'."""
    with opener('requires', 'w') as fp:
        for entry in sorted(requirements):
            fp.write("%s\n" % entry)
1095
1095
class filecachesubentry(object):
    """Tracks the stat state of one file for cache invalidation.

    cachestat holds the last observed util.cachestat for the path (None if
    the file was absent or never statted); _cacheable records whether the
    filesystem can reliably report changes for it (None = not known yet).
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means we don't know yet whether the file is cacheable
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-stat the file, unless it is known to be uncacheable."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Report whether the file can be cached; unknown counts as yes."""
        if self._cacheable is None:
            # we don't know yet, assume it is for now
            return True
        return self._cacheable

    def changed(self):
        """True if the file changed since the last refresh, or if it must
        always be treated as changed because it cannot be cached."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again: _cacheable may just have been resolved to False, or
        # may still be None (treated as changed when the stat failed)
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        return False

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if the file is missing."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1150
1150
class filecacheentry(object):
    """Aggregates one filecachesubentry per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
1167
1167
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    On first access the decorated function is called and the stat info of
    the tracked files is recorded in the owner's _filecache map. On later
    accesses, fresh stat info is compared with the recorded one and the
    cached object is rebuilt when any of the files changed.

    Mercurial either atomic renames or appends for files under .hg, so to
    ensure the cache is reliable we need the filesystem to be able to tell
    us if a file has been replaced. If it can't, we fall back to recreating
    the object on every call (essentially the same behavior as
    propertycache).
    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Subclasses must override this to call the appropriate join function
        on 'obj' (an instance of the class whose member function was
        decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        # bytes form of the name; it is the key into obj._filecache
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # a value in __dict__ implies a recorded entry in _filecache
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie
            # if a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)
1247
1247
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for line in src:
            # each record is "<revspec>[ <value>]"
            if " " in line:
                k, v = line.strip().split(" ", 1)
            else:
                k, v = line.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child / close the stream, even on parse errors
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1302
1302
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd through ui.system with 'envvar' set to the inherited lock
    token, so the child process can take over the lock.

    Raises LockInheritanceContractViolation when no lock is held."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        # NOTE: deliberately mutates a caller-supplied environ mapping
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1312
1312
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1321
1321
class progress(object):
    """Helper to drive ui.progress for one topic.

    Tracks the current position and an optional total, and clears the
    progress display on complete() or when used as a context manager.
    """
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        # return self so "with progress(...) as p:" binds the helper
        # (previously this returned None, making the 'as' clause useless)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Move to 'pos' (optionally updating the total) and redraw."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        """Advance the position by 'step' and redraw."""
        self.update(self.pos + step, item, total)

    def complete(self):
        # a None position tells the ui to clear this topic's progress bar
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)
1352
1352
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1359
1359
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1365
1365
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # 'if line.strip()' skips lines consisting only of '\n',
            # which a bare 'if line' would keep (they are truthy)
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                msg = _("%r can't be used as a key")
                raise error.CorruptedState(msg % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                msg = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(msg)
            if not k[0:1].isalpha():
                msg = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(msg)
            if not k.isalnum():
                msg = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(msg)
            if '\n' in v:
                msg = "invalid value in a simple key-value file"
                raise error.ProgrammingError(msg)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1434
1434
1435 _reportobsoletedsource = [
1435 _reportobsoletedsource = [
1436 'debugobsolete',
1436 'debugobsolete',
1437 'pull',
1437 'pull',
1438 'push',
1438 'push',
1439 'serve',
1439 'serve',
1440 'unbundle',
1440 'unbundle',
1441 ]
1441 ]
1442
1442
1443 _reportnewcssource = [
1443 _reportnewcssource = [
1444 'pull',
1444 'pull',
1445 'unbundle',
1445 'unbundle',
1446 ]
1446 ]
1447
1447
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # silence the matcher's complaints about missing files; the command
        # itself will report them, so don't duplicate the message
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1460
1460
# hooks called with (repo, revs, match) to prefetch file contents
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1466
1466
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Depending on which sources produced the transaction (``txnname``), this
    hooks up reports for obsoleted changesets, new instabilities, new
    changeset ranges and phase changes.
    """
    def txmatch(sources):
        # does the transaction name start with any of the given sources?
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unfiltered revisions carrying each instability type
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            newrevs = tr.changes.get('revs', xrange(0, 0))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # only changesets that already existed locally and became public
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev not in newrevs
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1568
1568
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities

    Returns None (implicitly) when ``delta`` is not positive, in which case
    the caller emits no warning.
    """
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)
def nodesummaries(repo, nodes, maxnumnodes=4):
    """return a short-hash summary of ``nodes``

    Shows all nodes when there are at most ``maxnumnodes`` of them or the ui
    is verbose; otherwise shows the first ``maxnumnodes`` plus a count of the
    rest.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1574
1582
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads

    Raises error.Abort (with a hint listing the heads) if any visible branch
    has more than one head.
    """
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
1589
1597
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.

    The core implementation is the identity function; extensions wrap this to
    substitute or decorate the sink.
    """
    return sink
1595
1603
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1638
1646
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try the symbol as a plain revision number
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        # valid revnum hidden by the current filter
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # otherwise, try it as a hex node-id prefix
        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                # known to the unfiltered changelog but hidden
                revs.add(rev)

    return revs
1672
1680
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark

    Returns the revisions that are ancestors of the bookmark but not of any
    other head or bookmark, i.e. the changesets "owned" by ``mark``.
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now