##// END OF EJS Templates
copies: create helper for getting all copies for changeset...
Martin von Zweigbergk -
r42703:88ba0ff9 default
parent child Browse files
Show More
@@ -1,1959 +1,1981 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirid,
26 wdirid,
27 wdirrev,
27 wdirrev,
28 )
28 )
29
29
30 from . import (
30 from . import (
31 copies as copiesmod,
31 copies as copiesmod,
32 encoding,
32 encoding,
33 error,
33 error,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 obsutil,
36 obsutil,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 policy,
39 policy,
40 pycompat,
40 pycompat,
41 revsetlang,
41 revsetlang,
42 similar,
42 similar,
43 smartset,
43 smartset,
44 url,
44 url,
45 util,
45 util,
46 vfs,
46 vfs,
47 )
47 )
48
48
49 from .utils import (
49 from .utils import (
50 procutil,
50 procutil,
51 stringutil,
51 stringutil,
52 )
52 )
53
53
54 if pycompat.iswindows:
54 if pycompat.iswindows:
55 from . import scmwindows as scmplatform
55 from . import scmwindows as scmplatform
56 else:
56 else:
57 from . import scmposix as scmplatform
57 from . import scmposix as scmplatform
58
58
59 parsers = policy.importmod(r'parsers')
59 parsers = policy.importmod(r'parsers')
60
60
61 termsize = scmplatform.termsize
61 termsize = scmplatform.termsize
62
62
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Store the seven lists positionally; the properties below give
        # them readable names.
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = (r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                    r'unknown=%s, ignored=%s, clean=%s>')
        parts = tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
        return template % parts
116
116
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # items() instead of the Python 2-only iteritems(): sorted()
    # materializes the pairs anyway, so this is equally efficient and
    # keeps the function working on Python 3.
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
141
141
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret (and not extinct); they are
    # reported so the user understands why nothing moved.
    secretlist = []
    for n in (excluded or []):
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
158
158
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print the traceback (if enabled) before the outer handlers
            # turn the exception into a message + exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        # InterventionRequired is not a hard failure: exit code 1.
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in ("mpatch", "bdiff"):
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m == "zlib":
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped to `head`): stay silent.
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code

    return -1
269
269
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not a valid new label (bookmark/branch/tag) name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # int() succeeded: the label would be ambiguous with a revnum.
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
286
286
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt the dirstate/manifest
    # encodings, so they are rejected outright.
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))
292
292
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        # Config asks for no portability checking at all.
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
304
304
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lowered = val.lower()
    parsed = stringutil.parsebool(val)
    # Windows always aborts on non-portable names; elsewhere only when
    # configured to do so.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    if parsed is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
317
317
class casecollisionauditor(object):
    '''Detect case-folding collisions between new filenames and the dirstate.

    Calling the auditor with a filename warns (or aborts, depending on the
    'abort' flag) when the name collides case-insensitively with a tracked
    file.
    '''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower all tracked names in one encoding.lower() call over the
        # '\0'-joined list, then split back into a set.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
341
341
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    digest = hashlib.sha1()
    for rev in revs:
        digest.update('%d;' % rev)
    return digest.digest()
365
365
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only propagate errors on the root path itself; errors deeper in
        # the tree are ignored by os.walk.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat; return True only the first time we
            # see this directory (guards against symlink cycles).
            dirstat = os.stat(dirname)
            seen = any(samestat(dirstat, prior) for prior in dirlst)
            if not seen:
                dirlst.append(dirstat)
            return not seen
    else:
        # Without samestat we cannot detect cycles, so never follow links.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk through the symlink explicitly so the cycle
                        # bookkeeping in seen_dirs is shared.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
409
409
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # The working directory has no real node; substitute the magic wdir id.
    return wdirid if node is None else node
416
416
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # The working directory has no revision number; substitute wdirrev.
    return wdirrev if rev is None else rev
424
424
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
430
430
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full 40-digit hashes in debug mode, short ones otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
438
438
def resolvehexnodeidprefix(repo, prefix):
    '''Resolve a (possibly "x"-prefixed) hex node id prefix to a full node.

    Returns None when nothing matches; raises on ambiguity unless the
    disambiguation revset narrows the matches down to one.
    '''
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        # Strip the explicit "this is a hash, not a revnum" marker.
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = [n for n in map(repo.changelog.node, revs)
                           if hex(n).startswith(prefix)]
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
467
467
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
    except ValueError:
        return False
    # if we are a pure int, then starting with zero will not be
    # confused as a rev; or, obviously, if the int is larger
    # than the value of the tip rev. We still need to disambiguate if
    # prefix == '0', since that *is* a valid revnum.
    if prefix != b'0' and prefix[0:1] == b'0':
        return False
    return i < len(repo)
481
481
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    # a zero-length prefix can never be unambiguous
    minlength=max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # with this experimental option, a prefix that could also be
            # read as a revision number is marked with a leading 'x'
            # instead of being lengthened
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # otherwise grow the prefix until it can no longer be mistaken
        # for a revision number
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        # Disambiguate only against the configured subset of revisions.
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    # use the native nodetree when available; it answers
                    # shortest-prefix queries much faster than the
                    # pure-Python scan below
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # pure-Python fallback: linearly compare against every rev in
            # the subset, growing the prefix until exactly one matches
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
552
552
def isrevsymbol(repo, symbol):
    """Report whether `symbol` resolves to a revision in `repo`.

    See revsymbol() for what counts as a symbol. Note that an ambiguous
    nodeid prefix is not swallowed here: error.AmbiguousPrefixLookupError
    propagates to the caller.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
564
564
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        # the reserved names are handled directly by repo[...]
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # next, try to read the symbol as a (possibly negative) revnum
        try:
            r = int(symbol)
            if '%d' % r != symbol:
                # reject forms like "010" or "+5" that int() accepts but
                # that are not canonical revnum spellings
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # a 40-char symbol may be a full hex nodeid
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # finally, try the symbol as a short hex nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # the symbol names the working directory; return its context
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        # re-raise filtered lookups with a more helpful message/hint
        raise _filterederror(repo, symbol)
625
625
def _filterederror(repo, changeid):
    """Build the exception to raise about a filtered changeid.

    Kept as a standalone function so extensions (e.g. evolve) can
    experiment with alternative message variants.
    """
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden: resolve it in the unfiltered repo so we can
    # explain *why* it is not visible.
    ctx = revsymbol(repo.unfiltered(), changeid)
    if ctx.obsolete():
        # enrich the message with the reason the changeset was hidden
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid
    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
650
650
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve a single revspec to a changectx, falling back to `default`.

    An empty revspec (other than the integer 0) selects `default`.
    Aborts if the revset evaluates to an empty set; otherwise the last
    matching revision wins.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
659
659
def _pairspec(revspec):
    """Return whether `revspec` parses to a top-level range expression."""
    parsed = revsetlang.parse(revspec)
    if not parsed:
        return parsed
    return parsed[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
663
663
def revpair(repo, revs):
    """Resolve a list of revspecs to a (first, second) pair of contexts.

    With no revs, the pair is (working copy parent, working copy). When
    the specs collapse to a single revision, the second element is the
    working copy context unless a range was explicitly requested.
    """
    if not revs:
        return repo['.'], repo[None]

    resolved = revrange(repo, revs)
    if not resolved:
        raise error.Abort(_('empty revision range'))

    first = resolved.first()
    second = resolved.last()

    if first == second:
        # several specs collapsing to one revision is fine unless one of
        # them was individually empty
        if (len(revs) >= 2
            and not all(revrange(repo, [r]) for r in revs)):
            raise error.Abort(_('empty revision on one side of range'))

        # if top-level is range expression, the result must always be a pair
        if len(revs) == 1 and not _pairspec(revs[0]):
            return repo[first], repo[None]

    return repo[first], repo[second]
685
685
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases.

    The revsets specified by ``specs`` are chained with ``OR``; an empty
    ``specs`` yields an empty result. Integers in ``specs`` are treated
    as revision numbers. The revsets are assumed to be pre-formatted;
    use ``revsetlang.formatspec()`` for arguments that need expansion.

    A single revset may be specified. Returns a
    ``revset.abstractsmartset``, a list-like interface over integer
    revisions.
    """
    allspecs = [revsetlang.formatspec('%d', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
713
713
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    Merges (two non-null parents) keep both parents. Otherwise the sole
    parent is reported only when it is not the immediately preceding
    revision; in debug mode the null revision is appended so both parent
    slots are always shown.
    """
    parents = ctx.parents()
    if len(parents) <= 1:
        if repo.ui.debugflag:
            return [parents[0], repo[nullrev]]
        if parents[0].rev() >= intrev(ctx) - 1:
            # linear history: the parent adds no information
            return []
    return parents
729
729
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function maps a repo-relative path to a UI-suitable
    path. Depending on ui.relative-paths, this is either repo-relative
    or cwd-relative.

    legacyrelativevalue is used when ui.relative-paths=legacy.
    forcerelativevalue, if not None, overrides the config entirely.

    Raises error.ConfigError when ui.relative-paths is neither 'legacy'
    nor a boolean.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        # bind cwd and pathto once so the returned closure is cheap
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    if repo.ui.configbool('ui', 'slash'):
        return lambda f: f
    return util.localpath
764
764
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    def prefixed(f):
        # rejoin with the subrepo prefix before delegating
        return uipathfn(posixpath.join(subpath, f))
    return prefixed
768
768
def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    if pats:
        return True
    return bool(opts.get('include') or opts.get('exclude'))
776
776
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    # (docstring fix: previously read "it already has already been done")
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # a bare (kind-less) pattern: let the OS glob expand it
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid glob syntax: fall back to the literal pattern
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # explicit kind, or a glob that matched nothing: keep as-is
        ret.append(kindpat)
    return ret
795
795
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.

    Bad matches are reported through `badfn` when given; otherwise a
    warning is printed on the repo ui.'''
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
    if badfn is None:
        def badfn(f, msg):
            ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means no effective patterns were supplied
        return m, []
    return m, pats
819
819
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, discarded = matchandpats(ctx, pats, opts, globbed, default,
                                badfn=badfn)
    return m
824
824
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # `repo` is unused but kept for signature parity with the other
    # matcher factories in this module.
    return matchmod.always()
828
828
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # `repo` is unused; kept for signature parity with match()/matchall().
    return matchmod.exact(files, badfn=badfn)
832
832
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    A kind-less `pat` is canonicalized directly; a patterned `pat` must
    match exactly one file in revision `rev`, otherwise error.ParseError
    is raised with `msg`.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
846
846
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if origbackuppath:
        return vfs.vfs(repo.wvfs.join(origbackuppath))
    return None
855
855
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        # no dedicated backup directory configured: keep the backup next
        # to the file itself
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the shallowest conflicting ancestor needs removal;
                # everything below it is created by makedirs()
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        # a directory occupies the backup file's own path: clear it
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
890
890
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs

    Wraps a container whose membership test takes revision numbers and
    exposes a membership test taking nodes, translating via the
    changelog's node-to-rev mapping.
    """

    def __init__(self, repo, revcontainer):
        # bind both lookups once; __contains__ may be called in tight loops
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        rev = self._torev(node)
        return self._revcontains(rev)
900
900
901 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
901 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
902 fixphase=False, targetphase=None, backup=True):
902 fixphase=False, targetphase=None, backup=True):
903 """do common cleanups when old nodes are replaced by new nodes
903 """do common cleanups when old nodes are replaced by new nodes
904
904
905 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
905 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
906 (we might also want to move working directory parent in the future)
906 (we might also want to move working directory parent in the future)
907
907
908 By default, bookmark moves are calculated automatically from 'replacements',
908 By default, bookmark moves are calculated automatically from 'replacements',
909 but 'moves' can be used to override that. Also, 'moves' may include
909 but 'moves' can be used to override that. Also, 'moves' may include
910 additional bookmark moves that should not have associated obsmarkers.
910 additional bookmark moves that should not have associated obsmarkers.
911
911
912 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
912 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
913 have replacements. operation is a string, like "rebase".
913 have replacements. operation is a string, like "rebase".
914
914
915 metadata is dictionary containing metadata to be stored in obsmarker if
915 metadata is dictionary containing metadata to be stored in obsmarker if
916 obsolescence is enabled.
916 obsolescence is enabled.
917 """
917 """
918 assert fixphase or targetphase is None
918 assert fixphase or targetphase is None
919 if not replacements and not moves:
919 if not replacements and not moves:
920 return
920 return
921
921
922 # translate mapping's other forms
922 # translate mapping's other forms
923 if not util.safehasattr(replacements, 'items'):
923 if not util.safehasattr(replacements, 'items'):
924 replacements = {(n,): () for n in replacements}
924 replacements = {(n,): () for n in replacements}
925 else:
925 else:
926 # upgrading non tuple "source" to tuple ones for BC
926 # upgrading non tuple "source" to tuple ones for BC
927 repls = {}
927 repls = {}
928 for key, value in replacements.items():
928 for key, value in replacements.items():
929 if not isinstance(key, tuple):
929 if not isinstance(key, tuple):
930 key = (key,)
930 key = (key,)
931 repls[key] = value
931 repls[key] = value
932 replacements = repls
932 replacements = repls
933
933
934 # Unfiltered repo is needed since nodes in replacements might be hidden.
934 # Unfiltered repo is needed since nodes in replacements might be hidden.
935 unfi = repo.unfiltered()
935 unfi = repo.unfiltered()
936
936
937 # Calculate bookmark movements
937 # Calculate bookmark movements
938 if moves is None:
938 if moves is None:
939 moves = {}
939 moves = {}
940 for oldnodes, newnodes in replacements.items():
940 for oldnodes, newnodes in replacements.items():
941 for oldnode in oldnodes:
941 for oldnode in oldnodes:
942 if oldnode in moves:
942 if oldnode in moves:
943 continue
943 continue
944 if len(newnodes) > 1:
944 if len(newnodes) > 1:
945 # usually a split, take the one with biggest rev number
945 # usually a split, take the one with biggest rev number
946 newnode = next(unfi.set('max(%ln)', newnodes)).node()
946 newnode = next(unfi.set('max(%ln)', newnodes)).node()
947 elif len(newnodes) == 0:
947 elif len(newnodes) == 0:
948 # move bookmark backwards
948 # move bookmark backwards
949 allreplaced = []
949 allreplaced = []
950 for rep in replacements:
950 for rep in replacements:
951 allreplaced.extend(rep)
951 allreplaced.extend(rep)
952 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
952 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
953 allreplaced))
953 allreplaced))
954 if roots:
954 if roots:
955 newnode = roots[0].node()
955 newnode = roots[0].node()
956 else:
956 else:
957 newnode = nullid
957 newnode = nullid
958 else:
958 else:
959 newnode = newnodes[0]
959 newnode = newnodes[0]
960 moves[oldnode] = newnode
960 moves[oldnode] = newnode
961
961
962 allnewnodes = [n for ns in replacements.values() for n in ns]
962 allnewnodes = [n for ns in replacements.values() for n in ns]
963 toretract = {}
963 toretract = {}
964 toadvance = {}
964 toadvance = {}
965 if fixphase:
965 if fixphase:
966 precursors = {}
966 precursors = {}
967 for oldnodes, newnodes in replacements.items():
967 for oldnodes, newnodes in replacements.items():
968 for oldnode in oldnodes:
968 for oldnode in oldnodes:
969 for newnode in newnodes:
969 for newnode in newnodes:
970 precursors.setdefault(newnode, []).append(oldnode)
970 precursors.setdefault(newnode, []).append(oldnode)
971
971
972 allnewnodes.sort(key=lambda n: unfi[n].rev())
972 allnewnodes.sort(key=lambda n: unfi[n].rev())
973 newphases = {}
973 newphases = {}
974 def phase(ctx):
974 def phase(ctx):
975 return newphases.get(ctx.node(), ctx.phase())
975 return newphases.get(ctx.node(), ctx.phase())
976 for newnode in allnewnodes:
976 for newnode in allnewnodes:
977 ctx = unfi[newnode]
977 ctx = unfi[newnode]
978 parentphase = max(phase(p) for p in ctx.parents())
978 parentphase = max(phase(p) for p in ctx.parents())
979 if targetphase is None:
979 if targetphase is None:
980 oldphase = max(unfi[oldnode].phase()
980 oldphase = max(unfi[oldnode].phase()
981 for oldnode in precursors[newnode])
981 for oldnode in precursors[newnode])
982 newphase = max(oldphase, parentphase)
982 newphase = max(oldphase, parentphase)
983 else:
983 else:
984 newphase = max(targetphase, parentphase)
984 newphase = max(targetphase, parentphase)
985 newphases[newnode] = newphase
985 newphases[newnode] = newphase
986 if newphase > ctx.phase():
986 if newphase > ctx.phase():
987 toretract.setdefault(newphase, []).append(newnode)
987 toretract.setdefault(newphase, []).append(newnode)
988 elif newphase < ctx.phase():
988 elif newphase < ctx.phase():
989 toadvance.setdefault(newphase, []).append(newnode)
989 toadvance.setdefault(newphase, []).append(newnode)
990
990
991 with repo.transaction('cleanup') as tr:
991 with repo.transaction('cleanup') as tr:
992 # Move bookmarks
992 # Move bookmarks
993 bmarks = repo._bookmarks
993 bmarks = repo._bookmarks
994 bmarkchanges = []
994 bmarkchanges = []
995 for oldnode, newnode in moves.items():
995 for oldnode, newnode in moves.items():
996 oldbmarks = repo.nodebookmarks(oldnode)
996 oldbmarks = repo.nodebookmarks(oldnode)
997 if not oldbmarks:
997 if not oldbmarks:
998 continue
998 continue
999 from . import bookmarks # avoid import cycle
999 from . import bookmarks # avoid import cycle
1000 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1000 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1001 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1001 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1002 hex(oldnode), hex(newnode)))
1002 hex(oldnode), hex(newnode)))
1003 # Delete divergent bookmarks being parents of related newnodes
1003 # Delete divergent bookmarks being parents of related newnodes
1004 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1004 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1005 allnewnodes, newnode, oldnode)
1005 allnewnodes, newnode, oldnode)
1006 deletenodes = _containsnode(repo, deleterevs)
1006 deletenodes = _containsnode(repo, deleterevs)
1007 for name in oldbmarks:
1007 for name in oldbmarks:
1008 bmarkchanges.append((name, newnode))
1008 bmarkchanges.append((name, newnode))
1009 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1009 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1010 bmarkchanges.append((b, None))
1010 bmarkchanges.append((b, None))
1011
1011
1012 if bmarkchanges:
1012 if bmarkchanges:
1013 bmarks.applychanges(repo, tr, bmarkchanges)
1013 bmarks.applychanges(repo, tr, bmarkchanges)
1014
1014
1015 for phase, nodes in toretract.items():
1015 for phase, nodes in toretract.items():
1016 phases.retractboundary(repo, tr, phase, nodes)
1016 phases.retractboundary(repo, tr, phase, nodes)
1017 for phase, nodes in toadvance.items():
1017 for phase, nodes in toadvance.items():
1018 phases.advanceboundary(repo, tr, phase, nodes)
1018 phases.advanceboundary(repo, tr, phase, nodes)
1019
1019
1020 mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
1020 mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
1021 # Obsolete or strip nodes
1021 # Obsolete or strip nodes
1022 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1022 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1023 # If a node is already obsoleted, and we want to obsolete it
1023 # If a node is already obsoleted, and we want to obsolete it
1024 # without a successor, skip that obssolete request since it's
1024 # without a successor, skip that obssolete request since it's
1025 # unnecessary. That's the "if s or not isobs(n)" check below.
1025 # unnecessary. That's the "if s or not isobs(n)" check below.
1026 # Also sort the node in topology order, that might be useful for
1026 # Also sort the node in topology order, that might be useful for
1027 # some obsstore logic.
1027 # some obsstore logic.
1028 # NOTE: the sorting might belong to createmarkers.
1028 # NOTE: the sorting might belong to createmarkers.
1029 torev = unfi.changelog.rev
1029 torev = unfi.changelog.rev
1030 sortfunc = lambda ns: torev(ns[0][0])
1030 sortfunc = lambda ns: torev(ns[0][0])
1031 rels = []
1031 rels = []
1032 for ns, s in sorted(replacements.items(), key=sortfunc):
1032 for ns, s in sorted(replacements.items(), key=sortfunc):
1033 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1033 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1034 rels.append(rel)
1034 rels.append(rel)
1035 if rels:
1035 if rels:
1036 obsolete.createmarkers(repo, rels, operation=operation,
1036 obsolete.createmarkers(repo, rels, operation=operation,
1037 metadata=metadata)
1037 metadata=metadata)
1038 elif phases.supportinternal(repo) and mayusearchived:
1038 elif phases.supportinternal(repo) and mayusearchived:
1039 # this assume we do not have "unstable" nodes above the cleaned ones
1039 # this assume we do not have "unstable" nodes above the cleaned ones
1040 allreplaced = set()
1040 allreplaced = set()
1041 for ns in replacements.keys():
1041 for ns in replacements.keys():
1042 allreplaced.update(ns)
1042 allreplaced.update(ns)
1043 if backup:
1043 if backup:
1044 from . import repair # avoid import cycle
1044 from . import repair # avoid import cycle
1045 node = min(allreplaced, key=repo.changelog.rev)
1045 node = min(allreplaced, key=repo.changelog.rev)
1046 repair.backupbundle(repo, allreplaced, allreplaced, node,
1046 repair.backupbundle(repo, allreplaced, allreplaced, node,
1047 operation)
1047 operation)
1048 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1048 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1049 else:
1049 else:
1050 from . import repair # avoid import cycle
1050 from . import repair # avoid import cycle
1051 tostrip = list(n for ns in replacements for n in ns)
1051 tostrip = list(n for ns in replacements for n in ns)
1052 if tostrip:
1052 if tostrip:
1053 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1053 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1054 backup=backup)
1054 backup=backup)
1055
1055
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    '''Add new files and forget missing ones, optionally detecting renames.

    Recurses into subrepos first, then walks the working copy. Returns 1 if
    any explicitly requested file was rejected or a subrepo reported failure,
    0 otherwise.
    '''
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    wctx = repo[None]

    # Process subrepositories before the main repo.
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % uipathfn(subpath))

    rejected = []
    def badfn(f, msg):
        # Only report files the user named explicitly; record all rejections.
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % uipathfn(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % uipathfn(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # An explicitly requested file that was rejected is a hard failure.
    if any(f in m.files() for f in rejected):
        return 1
    return ret
1118
1118
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Marks new files as added, missing files as removed, and records detected
    renames. Returns 1 if any file in ``files`` could not be matched, and 0
    otherwise.
    '''
    # Bind ``rejected`` before building the matcher whose badfn closes over
    # it; the previous ordering only worked because the closure runs lazily.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
1151
1151
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        entry = dirstate[abs]
        if entry == '?' and audit_path.check(abs):
            # untracked and the path passes the auditor: candidate for add
            unknown.append(abs)
        elif entry != 'r' and not st:
            # tracked (and not already removed) but missing on disk
            deleted.append(abs)
        elif entry == 'r':
            # marked removed; on-disk presence distinguishes renames
            if st:
                forgotten.append(abs)
            else:
                removed.append(abs)
        elif entry == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1181
1181
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.

    Returns a dict mapping each new name to the old name it was detected as
    a rename of. Detection is skipped entirely when ``similarity`` is 0.
    '''
    renames = {}
    if similarity > 0:
        candidates = similar.findrenames(repo, added, removed, similarity)
        for old, new, score in candidates:
            bothexact = matcher.exact(old) and matcher.exact(new)
            if repo.ui.verbose or not bothexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (uipathfn(old), uipathfn(new),
                                score * 100))
            renames[new] = old
    return renames
1196
1196
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames (a new -> old mapping) as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
1206
1206
def getrenamedfn(repo, endrev=None):
    '''Return a function that looks up a file's copy source at a revision.

    The returned function takes (fn, rev) and returns the name the file was
    copied/renamed from in that changeset, or None.
    '''
    if copiesmod.usechangesetcentricalgo(repo):
        def getrenamed(fn, rev):
            # copies are stored on the changeset; consult both parents
            ctx = repo[rev]
            for copymap in (ctx.p1copies(), ctx.p2copies()):
                if fn in copymap:
                    return copymap[fn]
            return None
        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # populate the per-file cache from the filelog in one pass
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed
1249
1249
def getcopiesfn(repo, endrev=None):
    '''Return a function that gets all copies recorded for a changeset.

    The returned function takes a changectx and returns a sorted list of
    (destination, source) pairs.
    '''
    if copiesmod.usechangesetcentricalgo(repo):
        def copiesfn(ctx):
            p2copies = ctx.p2copies()
            if not p2copies:
                return sorted(ctx.p1copies().items())
            allcopies = ctx.p1copies().copy()
            # There should be no overlap
            allcopies.update(p2copies)
            return sorted(allcopies.items())
    else:
        getrenamed = getrenamedfn(repo, endrev)
        def copiesfn(ctx):
            rev = ctx.rev()
            pairs = []
            for fn in ctx.files():
                source = getrenamed(fn, rev)
                if source:
                    pairs.append((fn, source))
            return pairs

    return copiesfn
1271
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # source was only added in the working copy: no copy data to record
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1268
1290
def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo['.']
    ds = repo.dirstate
    ds.setparents(newctx.node(), nullid)
    wccopies = dict(ds.copies())
    s = newctx.status(oldctx, match=match)

    for f in s.modified:
        # modified + removed -> removed
        if ds[f] != 'r':
            ds.normallookup(f)

    for f in s.added:
        if ds[f] == 'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != 'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == 'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != 'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(wccopies)
    copies = dict((dst, oldcopies.get(src, src))
                  for dst, src in oldcopies.iteritems())
    # Adjust the dirstate copies
    for dst, src in copies.iteritems():
        if src not in newctx or dst in newctx or ds[dst] != 'a':
            src = None
        ds.copy(src, dst)
1311
1333
def writerequires(opener, requirements):
    """Write the sorted ``requirements`` to the 'requires' file.

    Uses ``atomictemp`` so readers never see a partially written file.
    """
    lines = ["%s\n" % requirement for requirement in sorted(requirements)]
    with opener('requires', 'w', atomictemp=True) as fp:
        fp.write(''.join(lines))
1316
1338
class filecachesubentry(object):
    """Tracks the stat state of one file for cache invalidation purposes."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means "cacheability unknown so far"
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        # re-stat only when the file is (believed to be) cacheable
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # we don't know yet, assume it is for now
        if self._cacheable is None:
            return True
        return self._cacheable

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        return False

    @staticmethod
    def stat(path):
        # returns None when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1371
1393
class filecacheentry(object):
    """A cache entry covering several files; changed if any one changed."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1388
1410
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # record both the native-str and bytes forms of the decorated name
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        ce = obj._filecache.get(self.name)

        if ce:
            if ce.changed():
                ce.obj = self.func(obj)
        else:
            joined = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            ce = filecacheentry(joined, True)
            ce.obj = self.func(obj)

            obj._filecache[self.name] = ce

        obj.__dict__[self.sname] = ce.obj
        return ce.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        ce = obj._filecache.get(self.name)
        if ce is None:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            joined = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(joined, False)
            obj._filecache[self.name] = ce

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x
1473
1495
1474 def extdatasource(repo, source):
1496 def extdatasource(repo, source):
1475 """Gather a map of rev -> value dict from the specified source
1497 """Gather a map of rev -> value dict from the specified source
1476
1498
1477 A source spec is treated as a URL, with a special case shell: type
1499 A source spec is treated as a URL, with a special case shell: type
1478 for parsing the output from a shell command.
1500 for parsing the output from a shell command.
1479
1501
1480 The data is parsed as a series of newline-separated records where
1502 The data is parsed as a series of newline-separated records where
1481 each record is a revision specifier optionally followed by a space
1503 each record is a revision specifier optionally followed by a space
1482 and a freeform string value. If the revision is known locally, it
1504 and a freeform string value. If the revision is known locally, it
1483 is converted to a rev, otherwise the record is skipped.
1505 is converted to a rev, otherwise the record is skipped.
1484
1506
1485 Note that both key and value are treated as UTF-8 and converted to
1507 Note that both key and value are treated as UTF-8 and converted to
1486 the local encoding. This allows uniformity between local and
1508 the local encoding. This allows uniformity between local and
1487 remote data sources.
1509 remote data sources.
1488 """
1510 """
1489
1511
1490 spec = repo.ui.config("extdata", source)
1512 spec = repo.ui.config("extdata", source)
1491 if not spec:
1513 if not spec:
1492 raise error.Abort(_("unknown extdata source '%s'") % source)
1514 raise error.Abort(_("unknown extdata source '%s'") % source)
1493
1515
1494 data = {}
1516 data = {}
1495 src = proc = None
1517 src = proc = None
1496 try:
1518 try:
1497 if spec.startswith("shell:"):
1519 if spec.startswith("shell:"):
1498 # external commands should be run relative to the repo root
1520 # external commands should be run relative to the repo root
1499 cmd = spec[6:]
1521 cmd = spec[6:]
1500 proc = subprocess.Popen(procutil.tonativestr(cmd),
1522 proc = subprocess.Popen(procutil.tonativestr(cmd),
1501 shell=True, bufsize=-1,
1523 shell=True, bufsize=-1,
1502 close_fds=procutil.closefds,
1524 close_fds=procutil.closefds,
1503 stdout=subprocess.PIPE,
1525 stdout=subprocess.PIPE,
1504 cwd=procutil.tonativestr(repo.root))
1526 cwd=procutil.tonativestr(repo.root))
1505 src = proc.stdout
1527 src = proc.stdout
1506 else:
1528 else:
1507 # treat as a URL or file
1529 # treat as a URL or file
1508 src = url.open(repo.ui, spec)
1530 src = url.open(repo.ui, spec)
1509 for l in src:
1531 for l in src:
1510 if " " in l:
1532 if " " in l:
1511 k, v = l.strip().split(" ", 1)
1533 k, v = l.strip().split(" ", 1)
1512 else:
1534 else:
1513 k, v = l.strip(), ""
1535 k, v = l.strip(), ""
1514
1536
1515 k = encoding.tolocal(k)
1537 k = encoding.tolocal(k)
1516 try:
1538 try:
1517 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1539 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1518 except (error.LookupError, error.RepoLookupError):
1540 except (error.LookupError, error.RepoLookupError):
1519 pass # we ignore data for nodes that don't exist locally
1541 pass # we ignore data for nodes that don't exist locally
1520 finally:
1542 finally:
1521 if proc:
1543 if proc:
1522 proc.communicate()
1544 proc.communicate()
1523 if src:
1545 if src:
1524 src.close()
1546 src.close()
1525 if proc and proc.returncode != 0:
1547 if proc and proc.returncode != 0:
1526 raise error.Abort(_("extdata command '%s' failed: %s")
1548 raise error.Abort(_("extdata command '%s' failed: %s")
1527 % (cmd, procutil.explainexit(proc.returncode)))
1549 % (cmd, procutil.explainexit(proc.returncode)))
1528
1550
1529 return data
1551 return data
1530
1552
1531 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1553 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1532 if lock is None:
1554 if lock is None:
1533 raise error.LockInheritanceContractViolation(
1555 raise error.LockInheritanceContractViolation(
1534 'lock can only be inherited while held')
1556 'lock can only be inherited while held')
1535 if environ is None:
1557 if environ is None:
1536 environ = {}
1558 environ = {}
1537 with lock.inherit() as locker:
1559 with lock.inherit() as locker:
1538 environ[envvar] = locker
1560 environ[envvar] = locker
1539 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1561 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1540
1562
1541 def wlocksub(repo, cmd, *args, **kwargs):
1563 def wlocksub(repo, cmd, *args, **kwargs):
1542 """run cmd as a subprocess that allows inheriting repo's wlock
1564 """run cmd as a subprocess that allows inheriting repo's wlock
1543
1565
1544 This can only be called while the wlock is held. This takes all the
1566 This can only be called while the wlock is held. This takes all the
1545 arguments that ui.system does, and returns the exit code of the
1567 arguments that ui.system does, and returns the exit code of the
1546 subprocess."""
1568 subprocess."""
1547 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1569 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1548 **kwargs)
1570 **kwargs)
1549
1571
1550 class progress(object):
1572 class progress(object):
1551 def __init__(self, ui, updatebar, topic, unit="", total=None):
1573 def __init__(self, ui, updatebar, topic, unit="", total=None):
1552 self.ui = ui
1574 self.ui = ui
1553 self.pos = 0
1575 self.pos = 0
1554 self.topic = topic
1576 self.topic = topic
1555 self.unit = unit
1577 self.unit = unit
1556 self.total = total
1578 self.total = total
1557 self.debug = ui.configbool('progress', 'debug')
1579 self.debug = ui.configbool('progress', 'debug')
1558 self._updatebar = updatebar
1580 self._updatebar = updatebar
1559
1581
1560 def __enter__(self):
1582 def __enter__(self):
1561 return self
1583 return self
1562
1584
1563 def __exit__(self, exc_type, exc_value, exc_tb):
1585 def __exit__(self, exc_type, exc_value, exc_tb):
1564 self.complete()
1586 self.complete()
1565
1587
1566 def update(self, pos, item="", total=None):
1588 def update(self, pos, item="", total=None):
1567 assert pos is not None
1589 assert pos is not None
1568 if total:
1590 if total:
1569 self.total = total
1591 self.total = total
1570 self.pos = pos
1592 self.pos = pos
1571 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1593 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1572 if self.debug:
1594 if self.debug:
1573 self._printdebug(item)
1595 self._printdebug(item)
1574
1596
1575 def increment(self, step=1, item="", total=None):
1597 def increment(self, step=1, item="", total=None):
1576 self.update(self.pos + step, item, total)
1598 self.update(self.pos + step, item, total)
1577
1599
1578 def complete(self):
1600 def complete(self):
1579 self.pos = None
1601 self.pos = None
1580 self.unit = ""
1602 self.unit = ""
1581 self.total = None
1603 self.total = None
1582 self._updatebar(self.topic, self.pos, "", self.unit, self.total)
1604 self._updatebar(self.topic, self.pos, "", self.unit, self.total)
1583
1605
1584 def _printdebug(self, item):
1606 def _printdebug(self, item):
1585 if self.unit:
1607 if self.unit:
1586 unit = ' ' + self.unit
1608 unit = ' ' + self.unit
1587 if item:
1609 if item:
1588 item = ' ' + item
1610 item = ' ' + item
1589
1611
1590 if self.total:
1612 if self.total:
1591 pct = 100.0 * self.pos / self.total
1613 pct = 100.0 * self.pos / self.total
1592 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1614 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1593 % (self.topic, item, self.pos, self.total, unit, pct))
1615 % (self.topic, item, self.pos, self.total, unit, pct))
1594 else:
1616 else:
1595 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1617 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1596
1618
1597 def gdinitconfig(ui):
1619 def gdinitconfig(ui):
1598 """helper function to know if a repo should be created as general delta
1620 """helper function to know if a repo should be created as general delta
1599 """
1621 """
1600 # experimental config: format.generaldelta
1622 # experimental config: format.generaldelta
1601 return (ui.configbool('format', 'generaldelta')
1623 return (ui.configbool('format', 'generaldelta')
1602 or ui.configbool('format', 'usegeneraldelta'))
1624 or ui.configbool('format', 'usegeneraldelta'))
1603
1625
1604 def gddeltaconfig(ui):
1626 def gddeltaconfig(ui):
1605 """helper function to know if incoming delta should be optimised
1627 """helper function to know if incoming delta should be optimised
1606 """
1628 """
1607 # experimental config: format.generaldelta
1629 # experimental config: format.generaldelta
1608 return ui.configbool('format', 'generaldelta')
1630 return ui.configbool('format', 'generaldelta')
1609
1631
1610 class simplekeyvaluefile(object):
1632 class simplekeyvaluefile(object):
1611 """A simple file with key=value lines
1633 """A simple file with key=value lines
1612
1634
1613 Keys must be alphanumerics and start with a letter, values must not
1635 Keys must be alphanumerics and start with a letter, values must not
1614 contain '\n' characters"""
1636 contain '\n' characters"""
1615 firstlinekey = '__firstline'
1637 firstlinekey = '__firstline'
1616
1638
1617 def __init__(self, vfs, path, keys=None):
1639 def __init__(self, vfs, path, keys=None):
1618 self.vfs = vfs
1640 self.vfs = vfs
1619 self.path = path
1641 self.path = path
1620
1642
1621 def read(self, firstlinenonkeyval=False):
1643 def read(self, firstlinenonkeyval=False):
1622 """Read the contents of a simple key-value file
1644 """Read the contents of a simple key-value file
1623
1645
1624 'firstlinenonkeyval' indicates whether the first line of file should
1646 'firstlinenonkeyval' indicates whether the first line of file should
1625 be treated as a key-value pair or reuturned fully under the
1647 be treated as a key-value pair or reuturned fully under the
1626 __firstline key."""
1648 __firstline key."""
1627 lines = self.vfs.readlines(self.path)
1649 lines = self.vfs.readlines(self.path)
1628 d = {}
1650 d = {}
1629 if firstlinenonkeyval:
1651 if firstlinenonkeyval:
1630 if not lines:
1652 if not lines:
1631 e = _("empty simplekeyvalue file")
1653 e = _("empty simplekeyvalue file")
1632 raise error.CorruptedState(e)
1654 raise error.CorruptedState(e)
1633 # we don't want to include '\n' in the __firstline
1655 # we don't want to include '\n' in the __firstline
1634 d[self.firstlinekey] = lines[0][:-1]
1656 d[self.firstlinekey] = lines[0][:-1]
1635 del lines[0]
1657 del lines[0]
1636
1658
1637 try:
1659 try:
1638 # the 'if line.strip()' part prevents us from failing on empty
1660 # the 'if line.strip()' part prevents us from failing on empty
1639 # lines which only contain '\n' therefore are not skipped
1661 # lines which only contain '\n' therefore are not skipped
1640 # by 'if line'
1662 # by 'if line'
1641 updatedict = dict(line[:-1].split('=', 1) for line in lines
1663 updatedict = dict(line[:-1].split('=', 1) for line in lines
1642 if line.strip())
1664 if line.strip())
1643 if self.firstlinekey in updatedict:
1665 if self.firstlinekey in updatedict:
1644 e = _("%r can't be used as a key")
1666 e = _("%r can't be used as a key")
1645 raise error.CorruptedState(e % self.firstlinekey)
1667 raise error.CorruptedState(e % self.firstlinekey)
1646 d.update(updatedict)
1668 d.update(updatedict)
1647 except ValueError as e:
1669 except ValueError as e:
1648 raise error.CorruptedState(str(e))
1670 raise error.CorruptedState(str(e))
1649 return d
1671 return d
1650
1672
1651 def write(self, data, firstline=None):
1673 def write(self, data, firstline=None):
1652 """Write key=>value mapping to a file
1674 """Write key=>value mapping to a file
1653 data is a dict. Keys must be alphanumerical and start with a letter.
1675 data is a dict. Keys must be alphanumerical and start with a letter.
1654 Values must not contain newline characters.
1676 Values must not contain newline characters.
1655
1677
1656 If 'firstline' is not None, it is written to file before
1678 If 'firstline' is not None, it is written to file before
1657 everything else, as it is, not in a key=value form"""
1679 everything else, as it is, not in a key=value form"""
1658 lines = []
1680 lines = []
1659 if firstline is not None:
1681 if firstline is not None:
1660 lines.append('%s\n' % firstline)
1682 lines.append('%s\n' % firstline)
1661
1683
1662 for k, v in data.items():
1684 for k, v in data.items():
1663 if k == self.firstlinekey:
1685 if k == self.firstlinekey:
1664 e = "key name '%s' is reserved" % self.firstlinekey
1686 e = "key name '%s' is reserved" % self.firstlinekey
1665 raise error.ProgrammingError(e)
1687 raise error.ProgrammingError(e)
1666 if not k[0:1].isalpha():
1688 if not k[0:1].isalpha():
1667 e = "keys must start with a letter in a key-value file"
1689 e = "keys must start with a letter in a key-value file"
1668 raise error.ProgrammingError(e)
1690 raise error.ProgrammingError(e)
1669 if not k.isalnum():
1691 if not k.isalnum():
1670 e = "invalid key name in a simple key-value file"
1692 e = "invalid key name in a simple key-value file"
1671 raise error.ProgrammingError(e)
1693 raise error.ProgrammingError(e)
1672 if '\n' in v:
1694 if '\n' in v:
1673 e = "invalid value in a simple key-value file"
1695 e = "invalid value in a simple key-value file"
1674 raise error.ProgrammingError(e)
1696 raise error.ProgrammingError(e)
1675 lines.append("%s=%s\n" % (k, v))
1697 lines.append("%s=%s\n" % (k, v))
1676 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1698 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1677 fp.write(''.join(lines))
1699 fp.write(''.join(lines))
1678
1700
1679 _reportobsoletedsource = [
1701 _reportobsoletedsource = [
1680 'debugobsolete',
1702 'debugobsolete',
1681 'pull',
1703 'pull',
1682 'push',
1704 'push',
1683 'serve',
1705 'serve',
1684 'unbundle',
1706 'unbundle',
1685 ]
1707 ]
1686
1708
1687 _reportnewcssource = [
1709 _reportnewcssource = [
1688 'pull',
1710 'pull',
1689 'unbundle',
1711 'unbundle',
1690 ]
1712 ]
1691
1713
1692 def prefetchfiles(repo, revs, match):
1714 def prefetchfiles(repo, revs, match):
1693 """Invokes the registered file prefetch functions, allowing extensions to
1715 """Invokes the registered file prefetch functions, allowing extensions to
1694 ensure the corresponding files are available locally, before the command
1716 ensure the corresponding files are available locally, before the command
1695 uses them."""
1717 uses them."""
1696 if match:
1718 if match:
1697 # The command itself will complain about files that don't exist, so
1719 # The command itself will complain about files that don't exist, so
1698 # don't duplicate the message.
1720 # don't duplicate the message.
1699 match = matchmod.badmatch(match, lambda fn, msg: None)
1721 match = matchmod.badmatch(match, lambda fn, msg: None)
1700 else:
1722 else:
1701 match = matchall(repo)
1723 match = matchall(repo)
1702
1724
1703 fileprefetchhooks(repo, revs, match)
1725 fileprefetchhooks(repo, revs, match)
1704
1726
1705 # a list of (repo, revs, match) prefetch functions
1727 # a list of (repo, revs, match) prefetch functions
1706 fileprefetchhooks = util.hooks()
1728 fileprefetchhooks = util.hooks()
1707
1729
1708 # A marker that tells the evolve extension to suppress its own reporting
1730 # A marker that tells the evolve extension to suppress its own reporting
1709 _reportstroubledchangesets = True
1731 _reportstroubledchangesets = True
1710
1732
1711 def registersummarycallback(repo, otr, txnname=''):
1733 def registersummarycallback(repo, otr, txnname=''):
1712 """register a callback to issue a summary after the transaction is closed
1734 """register a callback to issue a summary after the transaction is closed
1713 """
1735 """
1714 def txmatch(sources):
1736 def txmatch(sources):
1715 return any(txnname.startswith(source) for source in sources)
1737 return any(txnname.startswith(source) for source in sources)
1716
1738
1717 categories = []
1739 categories = []
1718
1740
1719 def reportsummary(func):
1741 def reportsummary(func):
1720 """decorator for report callbacks."""
1742 """decorator for report callbacks."""
1721 # The repoview life cycle is shorter than the one of the actual
1743 # The repoview life cycle is shorter than the one of the actual
1722 # underlying repository. So the filtered object can die before the
1744 # underlying repository. So the filtered object can die before the
1723 # weakref is used leading to troubles. We keep a reference to the
1745 # weakref is used leading to troubles. We keep a reference to the
1724 # unfiltered object and restore the filtering when retrieving the
1746 # unfiltered object and restore the filtering when retrieving the
1725 # repository through the weakref.
1747 # repository through the weakref.
1726 filtername = repo.filtername
1748 filtername = repo.filtername
1727 reporef = weakref.ref(repo.unfiltered())
1749 reporef = weakref.ref(repo.unfiltered())
1728 def wrapped(tr):
1750 def wrapped(tr):
1729 repo = reporef()
1751 repo = reporef()
1730 if filtername:
1752 if filtername:
1731 repo = repo.filtered(filtername)
1753 repo = repo.filtered(filtername)
1732 func(repo, tr)
1754 func(repo, tr)
1733 newcat = '%02i-txnreport' % len(categories)
1755 newcat = '%02i-txnreport' % len(categories)
1734 otr.addpostclose(newcat, wrapped)
1756 otr.addpostclose(newcat, wrapped)
1735 categories.append(newcat)
1757 categories.append(newcat)
1736 return wrapped
1758 return wrapped
1737
1759
1738 if txmatch(_reportobsoletedsource):
1760 if txmatch(_reportobsoletedsource):
1739 @reportsummary
1761 @reportsummary
1740 def reportobsoleted(repo, tr):
1762 def reportobsoleted(repo, tr):
1741 obsoleted = obsutil.getobsoleted(repo, tr)
1763 obsoleted = obsutil.getobsoleted(repo, tr)
1742 if obsoleted:
1764 if obsoleted:
1743 repo.ui.status(_('obsoleted %i changesets\n')
1765 repo.ui.status(_('obsoleted %i changesets\n')
1744 % len(obsoleted))
1766 % len(obsoleted))
1745
1767
1746 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1768 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1747 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1769 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1748 instabilitytypes = [
1770 instabilitytypes = [
1749 ('orphan', 'orphan'),
1771 ('orphan', 'orphan'),
1750 ('phase-divergent', 'phasedivergent'),
1772 ('phase-divergent', 'phasedivergent'),
1751 ('content-divergent', 'contentdivergent'),
1773 ('content-divergent', 'contentdivergent'),
1752 ]
1774 ]
1753
1775
1754 def getinstabilitycounts(repo):
1776 def getinstabilitycounts(repo):
1755 filtered = repo.changelog.filteredrevs
1777 filtered = repo.changelog.filteredrevs
1756 counts = {}
1778 counts = {}
1757 for instability, revset in instabilitytypes:
1779 for instability, revset in instabilitytypes:
1758 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1780 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1759 filtered)
1781 filtered)
1760 return counts
1782 return counts
1761
1783
1762 oldinstabilitycounts = getinstabilitycounts(repo)
1784 oldinstabilitycounts = getinstabilitycounts(repo)
1763 @reportsummary
1785 @reportsummary
1764 def reportnewinstabilities(repo, tr):
1786 def reportnewinstabilities(repo, tr):
1765 newinstabilitycounts = getinstabilitycounts(repo)
1787 newinstabilitycounts = getinstabilitycounts(repo)
1766 for instability, revset in instabilitytypes:
1788 for instability, revset in instabilitytypes:
1767 delta = (newinstabilitycounts[instability] -
1789 delta = (newinstabilitycounts[instability] -
1768 oldinstabilitycounts[instability])
1790 oldinstabilitycounts[instability])
1769 msg = getinstabilitymessage(delta, instability)
1791 msg = getinstabilitymessage(delta, instability)
1770 if msg:
1792 if msg:
1771 repo.ui.warn(msg)
1793 repo.ui.warn(msg)
1772
1794
1773 if txmatch(_reportnewcssource):
1795 if txmatch(_reportnewcssource):
1774 @reportsummary
1796 @reportsummary
1775 def reportnewcs(repo, tr):
1797 def reportnewcs(repo, tr):
1776 """Report the range of new revisions pulled/unbundled."""
1798 """Report the range of new revisions pulled/unbundled."""
1777 origrepolen = tr.changes.get('origrepolen', len(repo))
1799 origrepolen = tr.changes.get('origrepolen', len(repo))
1778 unfi = repo.unfiltered()
1800 unfi = repo.unfiltered()
1779 if origrepolen >= len(unfi):
1801 if origrepolen >= len(unfi):
1780 return
1802 return
1781
1803
1782 # Compute the bounds of new visible revisions' range.
1804 # Compute the bounds of new visible revisions' range.
1783 revs = smartset.spanset(repo, start=origrepolen)
1805 revs = smartset.spanset(repo, start=origrepolen)
1784 if revs:
1806 if revs:
1785 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1807 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1786
1808
1787 if minrev == maxrev:
1809 if minrev == maxrev:
1788 revrange = minrev
1810 revrange = minrev
1789 else:
1811 else:
1790 revrange = '%s:%s' % (minrev, maxrev)
1812 revrange = '%s:%s' % (minrev, maxrev)
1791 draft = len(repo.revs('%ld and draft()', revs))
1813 draft = len(repo.revs('%ld and draft()', revs))
1792 secret = len(repo.revs('%ld and secret()', revs))
1814 secret = len(repo.revs('%ld and secret()', revs))
1793 if not (draft or secret):
1815 if not (draft or secret):
1794 msg = _('new changesets %s\n') % revrange
1816 msg = _('new changesets %s\n') % revrange
1795 elif draft and secret:
1817 elif draft and secret:
1796 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1818 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1797 msg %= (revrange, draft, secret)
1819 msg %= (revrange, draft, secret)
1798 elif draft:
1820 elif draft:
1799 msg = _('new changesets %s (%d drafts)\n')
1821 msg = _('new changesets %s (%d drafts)\n')
1800 msg %= (revrange, draft)
1822 msg %= (revrange, draft)
1801 elif secret:
1823 elif secret:
1802 msg = _('new changesets %s (%d secrets)\n')
1824 msg = _('new changesets %s (%d secrets)\n')
1803 msg %= (revrange, secret)
1825 msg %= (revrange, secret)
1804 else:
1826 else:
1805 errormsg = 'entered unreachable condition'
1827 errormsg = 'entered unreachable condition'
1806 raise error.ProgrammingError(errormsg)
1828 raise error.ProgrammingError(errormsg)
1807 repo.ui.status(msg)
1829 repo.ui.status(msg)
1808
1830
1809 # search new changesets directly pulled as obsolete
1831 # search new changesets directly pulled as obsolete
1810 duplicates = tr.changes.get('revduplicates', ())
1832 duplicates = tr.changes.get('revduplicates', ())
1811 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1833 obsadded = unfi.revs('(%d: + %ld) and obsolete()',
1812 origrepolen, duplicates)
1834 origrepolen, duplicates)
1813 cl = repo.changelog
1835 cl = repo.changelog
1814 extinctadded = [r for r in obsadded if r not in cl]
1836 extinctadded = [r for r in obsadded if r not in cl]
1815 if extinctadded:
1837 if extinctadded:
1816 # They are not just obsolete, but obsolete and invisible
1838 # They are not just obsolete, but obsolete and invisible
1817 # we call them "extinct" internally but the terms have not been
1839 # we call them "extinct" internally but the terms have not been
1818 # exposed to users.
1840 # exposed to users.
1819 msg = '(%d other changesets obsolete on arrival)\n'
1841 msg = '(%d other changesets obsolete on arrival)\n'
1820 repo.ui.status(msg % len(extinctadded))
1842 repo.ui.status(msg % len(extinctadded))
1821
1843
1822 @reportsummary
1844 @reportsummary
1823 def reportphasechanges(repo, tr):
1845 def reportphasechanges(repo, tr):
1824 """Report statistics of phase changes for changesets pre-existing
1846 """Report statistics of phase changes for changesets pre-existing
1825 pull/unbundle.
1847 pull/unbundle.
1826 """
1848 """
1827 origrepolen = tr.changes.get('origrepolen', len(repo))
1849 origrepolen = tr.changes.get('origrepolen', len(repo))
1828 phasetracking = tr.changes.get('phases', {})
1850 phasetracking = tr.changes.get('phases', {})
1829 if not phasetracking:
1851 if not phasetracking:
1830 return
1852 return
1831 published = [
1853 published = [
1832 rev for rev, (old, new) in phasetracking.iteritems()
1854 rev for rev, (old, new) in phasetracking.iteritems()
1833 if new == phases.public and rev < origrepolen
1855 if new == phases.public and rev < origrepolen
1834 ]
1856 ]
1835 if not published:
1857 if not published:
1836 return
1858 return
1837 repo.ui.status(_('%d local changesets published\n')
1859 repo.ui.status(_('%d local changesets published\n')
1838 % len(published))
1860 % len(published))
1839
1861
1840 def getinstabilitymessage(delta, instability):
1862 def getinstabilitymessage(delta, instability):
1841 """function to return the message to show warning about new instabilities
1863 """function to return the message to show warning about new instabilities
1842
1864
1843 exists as a separate function so that extension can wrap to show more
1865 exists as a separate function so that extension can wrap to show more
1844 information like how to fix instabilities"""
1866 information like how to fix instabilities"""
1845 if delta > 0:
1867 if delta > 0:
1846 return _('%i new %s changesets\n') % (delta, instability)
1868 return _('%i new %s changesets\n') % (delta, instability)
1847
1869
1848 def nodesummaries(repo, nodes, maxnumnodes=4):
1870 def nodesummaries(repo, nodes, maxnumnodes=4):
1849 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1871 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1850 return ' '.join(short(h) for h in nodes)
1872 return ' '.join(short(h) for h in nodes)
1851 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1873 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1852 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1874 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1853
1875
1854 def enforcesinglehead(repo, tr, desc):
1876 def enforcesinglehead(repo, tr, desc):
1855 """check that no named branch has multiple heads"""
1877 """check that no named branch has multiple heads"""
1856 if desc in ('strip', 'repair'):
1878 if desc in ('strip', 'repair'):
1857 # skip the logic during strip
1879 # skip the logic during strip
1858 return
1880 return
1859 visible = repo.filtered('visible')
1881 visible = repo.filtered('visible')
1860 # possible improvement: we could restrict the check to affected branch
1882 # possible improvement: we could restrict the check to affected branch
1861 for name, heads in visible.branchmap().iteritems():
1883 for name, heads in visible.branchmap().iteritems():
1862 if len(heads) > 1:
1884 if len(heads) > 1:
1863 msg = _('rejecting multiple heads on branch "%s"')
1885 msg = _('rejecting multiple heads on branch "%s"')
1864 msg %= name
1886 msg %= name
1865 hint = _('%d heads: %s')
1887 hint = _('%d heads: %s')
1866 hint %= (len(heads), nodesummaries(repo, heads))
1888 hint %= (len(heads), nodesummaries(repo, heads))
1867 raise error.Abort(msg, hint=hint)
1889 raise error.Abort(msg, hint=hint)
1868
1890
1869 def wrapconvertsink(sink):
1891 def wrapconvertsink(sink):
1870 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1892 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1871 before it is used, whether or not the convert extension was formally loaded.
1893 before it is used, whether or not the convert extension was formally loaded.
1872 """
1894 """
1873 return sink
1895 return sink
1874
1896
1875 def unhidehashlikerevs(repo, specs, hiddentype):
1897 def unhidehashlikerevs(repo, specs, hiddentype):
1876 """parse the user specs and unhide changesets whose hash or revision number
1898 """parse the user specs and unhide changesets whose hash or revision number
1877 is passed.
1899 is passed.
1878
1900
1879 hiddentype can be: 1) 'warn': warn while unhiding changesets
1901 hiddentype can be: 1) 'warn': warn while unhiding changesets
1880 2) 'nowarn': don't warn while unhiding changesets
1902 2) 'nowarn': don't warn while unhiding changesets
1881
1903
1882 returns a repo object with the required changesets unhidden
1904 returns a repo object with the required changesets unhidden
1883 """
1905 """
1884 if not repo.filtername or not repo.ui.configbool('experimental',
1906 if not repo.filtername or not repo.ui.configbool('experimental',
1885 'directaccess'):
1907 'directaccess'):
1886 return repo
1908 return repo
1887
1909
1888 if repo.filtername not in ('visible', 'visible-hidden'):
1910 if repo.filtername not in ('visible', 'visible-hidden'):
1889 return repo
1911 return repo
1890
1912
1891 symbols = set()
1913 symbols = set()
1892 for spec in specs:
1914 for spec in specs:
1893 try:
1915 try:
1894 tree = revsetlang.parse(spec)
1916 tree = revsetlang.parse(spec)
1895 except error.ParseError: # will be reported by scmutil.revrange()
1917 except error.ParseError: # will be reported by scmutil.revrange()
1896 continue
1918 continue
1897
1919
1898 symbols.update(revsetlang.gethashlikesymbols(tree))
1920 symbols.update(revsetlang.gethashlikesymbols(tree))
1899
1921
1900 if not symbols:
1922 if not symbols:
1901 return repo
1923 return repo
1902
1924
1903 revs = _getrevsfromsymbols(repo, symbols)
1925 revs = _getrevsfromsymbols(repo, symbols)
1904
1926
1905 if not revs:
1927 if not revs:
1906 return repo
1928 return repo
1907
1929
1908 if hiddentype == 'warn':
1930 if hiddentype == 'warn':
1909 unfi = repo.unfiltered()
1931 unfi = repo.unfiltered()
1910 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1932 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1911 repo.ui.warn(_("warning: accessing hidden changesets for write "
1933 repo.ui.warn(_("warning: accessing hidden changesets for write "
1912 "operation: %s\n") % revstr)
1934 "operation: %s\n") % revstr)
1913
1935
1914 # we have to use new filtername to separate branch/tags cache until we can
1936 # we have to use new filtername to separate branch/tags cache until we can
1915 # disbale these cache when revisions are dynamically pinned.
1937 # disbale these cache when revisions are dynamically pinned.
1916 return repo.filtered('visible-hidden', revs)
1938 return repo.filtered('visible-hidden', revs)
1917
1939
1918 def _getrevsfromsymbols(repo, symbols):
1940 def _getrevsfromsymbols(repo, symbols):
1919 """parse the list of symbols and returns a set of revision numbers of hidden
1941 """parse the list of symbols and returns a set of revision numbers of hidden
1920 changesets present in symbols"""
1942 changesets present in symbols"""
1921 revs = set()
1943 revs = set()
1922 unfi = repo.unfiltered()
1944 unfi = repo.unfiltered()
1923 unficl = unfi.changelog
1945 unficl = unfi.changelog
1924 cl = repo.changelog
1946 cl = repo.changelog
1925 tiprev = len(unficl)
1947 tiprev = len(unficl)
1926 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1948 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1927 for s in symbols:
1949 for s in symbols:
1928 try:
1950 try:
1929 n = int(s)
1951 n = int(s)
1930 if n <= tiprev:
1952 if n <= tiprev:
1931 if not allowrevnums:
1953 if not allowrevnums:
1932 continue
1954 continue
1933 else:
1955 else:
1934 if n not in cl:
1956 if n not in cl:
1935 revs.add(n)
1957 revs.add(n)
1936 continue
1958 continue
1937 except ValueError:
1959 except ValueError:
1938 pass
1960 pass
1939
1961
1940 try:
1962 try:
1941 s = resolvehexnodeidprefix(unfi, s)
1963 s = resolvehexnodeidprefix(unfi, s)
1942 except (error.LookupError, error.WdirUnsupported):
1964 except (error.LookupError, error.WdirUnsupported):
1943 s = None
1965 s = None
1944
1966
1945 if s is not None:
1967 if s is not None:
1946 rev = unficl.rev(s)
1968 rev = unficl.rev(s)
1947 if rev not in cl:
1969 if rev not in cl:
1948 revs.add(rev)
1970 revs.add(rev)
1949
1971
1950 return revs
1972 return revs
1951
1973
1952 def bookmarkrevs(repo, mark):
1974 def bookmarkrevs(repo, mark):
1953 """
1975 """
1954 Select revisions reachable by a given bookmark
1976 Select revisions reachable by a given bookmark
1955 """
1977 """
1956 return repo.revs("ancestors(bookmark(%s)) - "
1978 return repo.revs("ancestors(bookmark(%s)) - "
1957 "ancestors(head() and not bookmark(%s)) - "
1979 "ancestors(head() and not bookmark(%s)) - "
1958 "ancestors(bookmark() and not bookmark(%s))",
1980 "ancestors(bookmark() and not bookmark(%s))",
1959 mark, mark, mark)
1981 mark, mark, mark)
@@ -1,864 +1,860 b''
1 # templatekw.py - common changeset template keywords
1 # templatekw.py - common changeset template keywords
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 hex,
12 hex,
13 nullid,
13 nullid,
14 wdirid,
14 wdirid,
15 wdirrev,
15 wdirrev,
16 )
16 )
17
17
18 from . import (
18 from . import (
19 diffutil,
19 diffutil,
20 encoding,
20 encoding,
21 error,
21 error,
22 hbisect,
22 hbisect,
23 i18n,
23 i18n,
24 obsutil,
24 obsutil,
25 patch,
25 patch,
26 pycompat,
26 pycompat,
27 registrar,
27 registrar,
28 scmutil,
28 scmutil,
29 templateutil,
29 templateutil,
30 util,
30 util,
31 )
31 )
32 from .utils import (
32 from .utils import (
33 stringutil,
33 stringutil,
34 )
34 )
35
35
36 _hybrid = templateutil.hybrid
36 _hybrid = templateutil.hybrid
37 hybriddict = templateutil.hybriddict
37 hybriddict = templateutil.hybriddict
38 hybridlist = templateutil.hybridlist
38 hybridlist = templateutil.hybridlist
39 compatdict = templateutil.compatdict
39 compatdict = templateutil.compatdict
40 compatlist = templateutil.compatlist
40 compatlist = templateutil.compatlist
41 _showcompatlist = templateutil._showcompatlist
41 _showcompatlist = templateutil._showcompatlist
42
42
43 def getlatesttags(context, mapping, pattern=None):
43 def getlatesttags(context, mapping, pattern=None):
44 '''return date, distance and name for the latest tag of rev'''
44 '''return date, distance and name for the latest tag of rev'''
45 repo = context.resource(mapping, 'repo')
45 repo = context.resource(mapping, 'repo')
46 ctx = context.resource(mapping, 'ctx')
46 ctx = context.resource(mapping, 'ctx')
47 cache = context.resource(mapping, 'cache')
47 cache = context.resource(mapping, 'cache')
48
48
49 cachename = 'latesttags'
49 cachename = 'latesttags'
50 if pattern is not None:
50 if pattern is not None:
51 cachename += '-' + pattern
51 cachename += '-' + pattern
52 match = stringutil.stringmatcher(pattern)[2]
52 match = stringutil.stringmatcher(pattern)[2]
53 else:
53 else:
54 match = util.always
54 match = util.always
55
55
56 if cachename not in cache:
56 if cachename not in cache:
57 # Cache mapping from rev to a tuple with tag date, tag
57 # Cache mapping from rev to a tuple with tag date, tag
58 # distance and tag name
58 # distance and tag name
59 cache[cachename] = {-1: (0, 0, ['null'])}
59 cache[cachename] = {-1: (0, 0, ['null'])}
60 latesttags = cache[cachename]
60 latesttags = cache[cachename]
61
61
62 rev = ctx.rev()
62 rev = ctx.rev()
63 todo = [rev]
63 todo = [rev]
64 while todo:
64 while todo:
65 rev = todo.pop()
65 rev = todo.pop()
66 if rev in latesttags:
66 if rev in latesttags:
67 continue
67 continue
68 ctx = repo[rev]
68 ctx = repo[rev]
69 tags = [t for t in ctx.tags()
69 tags = [t for t in ctx.tags()
70 if (repo.tagtype(t) and repo.tagtype(t) != 'local'
70 if (repo.tagtype(t) and repo.tagtype(t) != 'local'
71 and match(t))]
71 and match(t))]
72 if tags:
72 if tags:
73 latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
73 latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
74 continue
74 continue
75 try:
75 try:
76 ptags = [latesttags[p.rev()] for p in ctx.parents()]
76 ptags = [latesttags[p.rev()] for p in ctx.parents()]
77 if len(ptags) > 1:
77 if len(ptags) > 1:
78 if ptags[0][2] == ptags[1][2]:
78 if ptags[0][2] == ptags[1][2]:
79 # The tuples are laid out so the right one can be found by
79 # The tuples are laid out so the right one can be found by
80 # comparison in this case.
80 # comparison in this case.
81 pdate, pdist, ptag = max(ptags)
81 pdate, pdist, ptag = max(ptags)
82 else:
82 else:
83 def key(x):
83 def key(x):
84 tag = x[2][0]
84 tag = x[2][0]
85 if ctx.rev() is None:
85 if ctx.rev() is None:
86 # only() doesn't support wdir
86 # only() doesn't support wdir
87 prevs = [c.rev() for c in ctx.parents()]
87 prevs = [c.rev() for c in ctx.parents()]
88 changes = repo.revs('only(%ld, %s)', prevs, tag)
88 changes = repo.revs('only(%ld, %s)', prevs, tag)
89 changessincetag = len(changes) + 1
89 changessincetag = len(changes) + 1
90 else:
90 else:
91 changes = repo.revs('only(%d, %s)', ctx.rev(), tag)
91 changes = repo.revs('only(%d, %s)', ctx.rev(), tag)
92 changessincetag = len(changes)
92 changessincetag = len(changes)
93 # Smallest number of changes since tag wins. Date is
93 # Smallest number of changes since tag wins. Date is
94 # used as tiebreaker.
94 # used as tiebreaker.
95 return [-changessincetag, x[0]]
95 return [-changessincetag, x[0]]
96 pdate, pdist, ptag = max(ptags, key=key)
96 pdate, pdist, ptag = max(ptags, key=key)
97 else:
97 else:
98 pdate, pdist, ptag = ptags[0]
98 pdate, pdist, ptag = ptags[0]
99 except KeyError:
99 except KeyError:
100 # Cache miss - recurse
100 # Cache miss - recurse
101 todo.append(rev)
101 todo.append(rev)
102 todo.extend(p.rev() for p in ctx.parents())
102 todo.extend(p.rev() for p in ctx.parents())
103 continue
103 continue
104 latesttags[rev] = pdate, pdist + 1, ptag
104 latesttags[rev] = pdate, pdist + 1, ptag
105 return latesttags[rev]
105 return latesttags[rev]
106
106
107 def getlogcolumns():
107 def getlogcolumns():
108 """Return a dict of log column labels"""
108 """Return a dict of log column labels"""
109 _ = pycompat.identity # temporarily disable gettext
109 _ = pycompat.identity # temporarily disable gettext
110 # i18n: column positioning for "hg log"
110 # i18n: column positioning for "hg log"
111 columns = _('bookmark: %s\n'
111 columns = _('bookmark: %s\n'
112 'branch: %s\n'
112 'branch: %s\n'
113 'changeset: %s\n'
113 'changeset: %s\n'
114 'copies: %s\n'
114 'copies: %s\n'
115 'date: %s\n'
115 'date: %s\n'
116 'extra: %s=%s\n'
116 'extra: %s=%s\n'
117 'files+: %s\n'
117 'files+: %s\n'
118 'files-: %s\n'
118 'files-: %s\n'
119 'files: %s\n'
119 'files: %s\n'
120 'instability: %s\n'
120 'instability: %s\n'
121 'manifest: %s\n'
121 'manifest: %s\n'
122 'obsolete: %s\n'
122 'obsolete: %s\n'
123 'parent: %s\n'
123 'parent: %s\n'
124 'phase: %s\n'
124 'phase: %s\n'
125 'summary: %s\n'
125 'summary: %s\n'
126 'tag: %s\n'
126 'tag: %s\n'
127 'user: %s\n')
127 'user: %s\n')
128 return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()],
128 return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()],
129 i18n._(columns).splitlines(True)))
129 i18n._(columns).splitlines(True)))
130
130
131 # basic internal templates
131 # basic internal templates
132 _changeidtmpl = '{rev}:{node|formatnode}'
132 _changeidtmpl = '{rev}:{node|formatnode}'
133
133
134 # default templates internally used for rendering of lists
134 # default templates internally used for rendering of lists
135 defaulttempl = {
135 defaulttempl = {
136 'parent': _changeidtmpl + ' ',
136 'parent': _changeidtmpl + ' ',
137 'manifest': _changeidtmpl,
137 'manifest': _changeidtmpl,
138 'file_copy': '{name} ({source})',
138 'file_copy': '{name} ({source})',
139 'envvar': '{key}={value}',
139 'envvar': '{key}={value}',
140 'extra': '{key}={value|stringescape}'
140 'extra': '{key}={value|stringescape}'
141 }
141 }
142 # filecopy is preserved for compatibility reasons
142 # filecopy is preserved for compatibility reasons
143 defaulttempl['filecopy'] = defaulttempl['file_copy']
143 defaulttempl['filecopy'] = defaulttempl['file_copy']
144
144
145 # keywords are callables (see registrar.templatekeyword for details)
145 # keywords are callables (see registrar.templatekeyword for details)
146 keywords = {}
146 keywords = {}
147 templatekeyword = registrar.templatekeyword(keywords)
147 templatekeyword = registrar.templatekeyword(keywords)
148
148
149 @templatekeyword('author', requires={'ctx'})
149 @templatekeyword('author', requires={'ctx'})
150 def showauthor(context, mapping):
150 def showauthor(context, mapping):
151 """Alias for ``{user}``"""
151 """Alias for ``{user}``"""
152 return showuser(context, mapping)
152 return showuser(context, mapping)
153
153
154 @templatekeyword('bisect', requires={'repo', 'ctx'})
154 @templatekeyword('bisect', requires={'repo', 'ctx'})
155 def showbisect(context, mapping):
155 def showbisect(context, mapping):
156 """String. The changeset bisection status."""
156 """String. The changeset bisection status."""
157 repo = context.resource(mapping, 'repo')
157 repo = context.resource(mapping, 'repo')
158 ctx = context.resource(mapping, 'ctx')
158 ctx = context.resource(mapping, 'ctx')
159 return hbisect.label(repo, ctx.node())
159 return hbisect.label(repo, ctx.node())
160
160
161 @templatekeyword('branch', requires={'ctx'})
161 @templatekeyword('branch', requires={'ctx'})
162 def showbranch(context, mapping):
162 def showbranch(context, mapping):
163 """String. The name of the branch on which the changeset was
163 """String. The name of the branch on which the changeset was
164 committed.
164 committed.
165 """
165 """
166 ctx = context.resource(mapping, 'ctx')
166 ctx = context.resource(mapping, 'ctx')
167 return ctx.branch()
167 return ctx.branch()
168
168
169 @templatekeyword('branches', requires={'ctx'})
169 @templatekeyword('branches', requires={'ctx'})
170 def showbranches(context, mapping):
170 def showbranches(context, mapping):
171 """List of strings. The name of the branch on which the
171 """List of strings. The name of the branch on which the
172 changeset was committed. Will be empty if the branch name was
172 changeset was committed. Will be empty if the branch name was
173 default. (DEPRECATED)
173 default. (DEPRECATED)
174 """
174 """
175 ctx = context.resource(mapping, 'ctx')
175 ctx = context.resource(mapping, 'ctx')
176 branch = ctx.branch()
176 branch = ctx.branch()
177 if branch != 'default':
177 if branch != 'default':
178 return compatlist(context, mapping, 'branch', [branch],
178 return compatlist(context, mapping, 'branch', [branch],
179 plural='branches')
179 plural='branches')
180 return compatlist(context, mapping, 'branch', [], plural='branches')
180 return compatlist(context, mapping, 'branch', [], plural='branches')
181
181
182 @templatekeyword('bookmarks', requires={'repo', 'ctx'})
182 @templatekeyword('bookmarks', requires={'repo', 'ctx'})
183 def showbookmarks(context, mapping):
183 def showbookmarks(context, mapping):
184 """List of strings. Any bookmarks associated with the
184 """List of strings. Any bookmarks associated with the
185 changeset. Also sets 'active', the name of the active bookmark.
185 changeset. Also sets 'active', the name of the active bookmark.
186 """
186 """
187 repo = context.resource(mapping, 'repo')
187 repo = context.resource(mapping, 'repo')
188 ctx = context.resource(mapping, 'ctx')
188 ctx = context.resource(mapping, 'ctx')
189 bookmarks = ctx.bookmarks()
189 bookmarks = ctx.bookmarks()
190 active = repo._activebookmark
190 active = repo._activebookmark
191 makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
191 makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
192 f = _showcompatlist(context, mapping, 'bookmark', bookmarks)
192 f = _showcompatlist(context, mapping, 'bookmark', bookmarks)
193 return _hybrid(f, bookmarks, makemap, pycompat.identity)
193 return _hybrid(f, bookmarks, makemap, pycompat.identity)
194
194
195 @templatekeyword('children', requires={'ctx'})
195 @templatekeyword('children', requires={'ctx'})
196 def showchildren(context, mapping):
196 def showchildren(context, mapping):
197 """List of strings. The children of the changeset."""
197 """List of strings. The children of the changeset."""
198 ctx = context.resource(mapping, 'ctx')
198 ctx = context.resource(mapping, 'ctx')
199 childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()]
199 childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()]
200 return compatlist(context, mapping, 'children', childrevs, element='child')
200 return compatlist(context, mapping, 'children', childrevs, element='child')
201
201
202 # Deprecated, but kept alive for help generation a purpose.
202 # Deprecated, but kept alive for help generation a purpose.
203 @templatekeyword('currentbookmark', requires={'repo', 'ctx'})
203 @templatekeyword('currentbookmark', requires={'repo', 'ctx'})
204 def showcurrentbookmark(context, mapping):
204 def showcurrentbookmark(context, mapping):
205 """String. The active bookmark, if it is associated with the changeset.
205 """String. The active bookmark, if it is associated with the changeset.
206 (DEPRECATED)"""
206 (DEPRECATED)"""
207 return showactivebookmark(context, mapping)
207 return showactivebookmark(context, mapping)
208
208
209 @templatekeyword('activebookmark', requires={'repo', 'ctx'})
209 @templatekeyword('activebookmark', requires={'repo', 'ctx'})
210 def showactivebookmark(context, mapping):
210 def showactivebookmark(context, mapping):
211 """String. The active bookmark, if it is associated with the changeset."""
211 """String. The active bookmark, if it is associated with the changeset."""
212 repo = context.resource(mapping, 'repo')
212 repo = context.resource(mapping, 'repo')
213 ctx = context.resource(mapping, 'ctx')
213 ctx = context.resource(mapping, 'ctx')
214 active = repo._activebookmark
214 active = repo._activebookmark
215 if active and active in ctx.bookmarks():
215 if active and active in ctx.bookmarks():
216 return active
216 return active
217 return ''
217 return ''
218
218
219 @templatekeyword('date', requires={'ctx'})
219 @templatekeyword('date', requires={'ctx'})
220 def showdate(context, mapping):
220 def showdate(context, mapping):
221 """Date information. The date when the changeset was committed."""
221 """Date information. The date when the changeset was committed."""
222 ctx = context.resource(mapping, 'ctx')
222 ctx = context.resource(mapping, 'ctx')
223 # the default string format is '<float(unixtime)><tzoffset>' because
223 # the default string format is '<float(unixtime)><tzoffset>' because
224 # python-hglib splits date at decimal separator.
224 # python-hglib splits date at decimal separator.
225 return templateutil.date(ctx.date(), showfmt='%d.0%d')
225 return templateutil.date(ctx.date(), showfmt='%d.0%d')
226
226
227 @templatekeyword('desc', requires={'ctx'})
227 @templatekeyword('desc', requires={'ctx'})
228 def showdescription(context, mapping):
228 def showdescription(context, mapping):
229 """String. The text of the changeset description."""
229 """String. The text of the changeset description."""
230 ctx = context.resource(mapping, 'ctx')
230 ctx = context.resource(mapping, 'ctx')
231 s = ctx.description()
231 s = ctx.description()
232 if isinstance(s, encoding.localstr):
232 if isinstance(s, encoding.localstr):
233 # try hard to preserve utf-8 bytes
233 # try hard to preserve utf-8 bytes
234 return encoding.tolocal(encoding.fromlocal(s).strip())
234 return encoding.tolocal(encoding.fromlocal(s).strip())
235 elif isinstance(s, encoding.safelocalstr):
235 elif isinstance(s, encoding.safelocalstr):
236 return encoding.safelocalstr(s.strip())
236 return encoding.safelocalstr(s.strip())
237 else:
237 else:
238 return s.strip()
238 return s.strip()
239
239
240 @templatekeyword('diffstat', requires={'ui', 'ctx'})
240 @templatekeyword('diffstat', requires={'ui', 'ctx'})
241 def showdiffstat(context, mapping):
241 def showdiffstat(context, mapping):
242 """String. Statistics of changes with the following format:
242 """String. Statistics of changes with the following format:
243 "modified files: +added/-removed lines"
243 "modified files: +added/-removed lines"
244 """
244 """
245 ui = context.resource(mapping, 'ui')
245 ui = context.resource(mapping, 'ui')
246 ctx = context.resource(mapping, 'ctx')
246 ctx = context.resource(mapping, 'ctx')
247 diffopts = diffutil.diffallopts(ui, {'noprefix': False})
247 diffopts = diffutil.diffallopts(ui, {'noprefix': False})
248 diff = ctx.diff(opts=diffopts)
248 diff = ctx.diff(opts=diffopts)
249 stats = patch.diffstatdata(util.iterlines(diff))
249 stats = patch.diffstatdata(util.iterlines(diff))
250 maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
250 maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
251 return '%d: +%d/-%d' % (len(stats), adds, removes)
251 return '%d: +%d/-%d' % (len(stats), adds, removes)
252
252
253 @templatekeyword('envvars', requires={'ui'})
253 @templatekeyword('envvars', requires={'ui'})
254 def showenvvars(context, mapping):
254 def showenvvars(context, mapping):
255 """A dictionary of environment variables. (EXPERIMENTAL)"""
255 """A dictionary of environment variables. (EXPERIMENTAL)"""
256 ui = context.resource(mapping, 'ui')
256 ui = context.resource(mapping, 'ui')
257 env = ui.exportableenviron()
257 env = ui.exportableenviron()
258 env = util.sortdict((k, env[k]) for k in sorted(env))
258 env = util.sortdict((k, env[k]) for k in sorted(env))
259 return compatdict(context, mapping, 'envvar', env, plural='envvars')
259 return compatdict(context, mapping, 'envvar', env, plural='envvars')
260
260
261 @templatekeyword('extras', requires={'ctx'})
261 @templatekeyword('extras', requires={'ctx'})
262 def showextras(context, mapping):
262 def showextras(context, mapping):
263 """List of dicts with key, value entries of the 'extras'
263 """List of dicts with key, value entries of the 'extras'
264 field of this changeset."""
264 field of this changeset."""
265 ctx = context.resource(mapping, 'ctx')
265 ctx = context.resource(mapping, 'ctx')
266 extras = ctx.extra()
266 extras = ctx.extra()
267 extras = util.sortdict((k, extras[k]) for k in sorted(extras))
267 extras = util.sortdict((k, extras[k]) for k in sorted(extras))
268 makemap = lambda k: {'key': k, 'value': extras[k]}
268 makemap = lambda k: {'key': k, 'value': extras[k]}
269 c = [makemap(k) for k in extras]
269 c = [makemap(k) for k in extras]
270 f = _showcompatlist(context, mapping, 'extra', c, plural='extras')
270 f = _showcompatlist(context, mapping, 'extra', c, plural='extras')
271 return _hybrid(f, extras, makemap,
271 return _hybrid(f, extras, makemap,
272 lambda k: '%s=%s' % (k, stringutil.escapestr(extras[k])))
272 lambda k: '%s=%s' % (k, stringutil.escapestr(extras[k])))
273
273
274 def _getfilestatus(context, mapping, listall=False):
274 def _getfilestatus(context, mapping, listall=False):
275 ctx = context.resource(mapping, 'ctx')
275 ctx = context.resource(mapping, 'ctx')
276 revcache = context.resource(mapping, 'revcache')
276 revcache = context.resource(mapping, 'revcache')
277 if 'filestatus' not in revcache or revcache['filestatusall'] < listall:
277 if 'filestatus' not in revcache or revcache['filestatusall'] < listall:
278 stat = ctx.p1().status(ctx, listignored=listall, listclean=listall,
278 stat = ctx.p1().status(ctx, listignored=listall, listclean=listall,
279 listunknown=listall)
279 listunknown=listall)
280 revcache['filestatus'] = stat
280 revcache['filestatus'] = stat
281 revcache['filestatusall'] = listall
281 revcache['filestatusall'] = listall
282 return revcache['filestatus']
282 return revcache['filestatus']
283
283
284 def _getfilestatusmap(context, mapping, listall=False):
284 def _getfilestatusmap(context, mapping, listall=False):
285 revcache = context.resource(mapping, 'revcache')
285 revcache = context.resource(mapping, 'revcache')
286 if 'filestatusmap' not in revcache or revcache['filestatusall'] < listall:
286 if 'filestatusmap' not in revcache or revcache['filestatusall'] < listall:
287 stat = _getfilestatus(context, mapping, listall=listall)
287 stat = _getfilestatus(context, mapping, listall=listall)
288 revcache['filestatusmap'] = statmap = {}
288 revcache['filestatusmap'] = statmap = {}
289 for char, files in zip(pycompat.iterbytestr('MAR!?IC'), stat):
289 for char, files in zip(pycompat.iterbytestr('MAR!?IC'), stat):
290 statmap.update((f, char) for f in files)
290 statmap.update((f, char) for f in files)
291 return revcache['filestatusmap'] # {path: statchar}
291 return revcache['filestatusmap'] # {path: statchar}
292
292
293 @templatekeyword('file_copies',
293 @templatekeyword('file_copies',
294 requires={'repo', 'ctx', 'cache', 'revcache'})
294 requires={'repo', 'ctx', 'cache', 'revcache'})
295 def showfilecopies(context, mapping):
295 def showfilecopies(context, mapping):
296 """List of strings. Files copied in this changeset with
296 """List of strings. Files copied in this changeset with
297 their sources.
297 their sources.
298 """
298 """
299 repo = context.resource(mapping, 'repo')
299 repo = context.resource(mapping, 'repo')
300 ctx = context.resource(mapping, 'ctx')
300 ctx = context.resource(mapping, 'ctx')
301 cache = context.resource(mapping, 'cache')
301 cache = context.resource(mapping, 'cache')
302 copies = context.resource(mapping, 'revcache').get('copies')
302 copies = context.resource(mapping, 'revcache').get('copies')
303 if copies is None:
303 if copies is None:
304 if 'getrenamed' not in cache:
304 if 'getcopies' not in cache:
305 cache['getrenamed'] = scmutil.getrenamedfn(repo)
305 cache['getcopies'] = scmutil.getcopiesfn(repo)
306 copies = []
306 getcopies = cache['getcopies']
307 getrenamed = cache['getrenamed']
307 copies = getcopies(ctx)
308 for fn in ctx.files():
309 rename = getrenamed(fn, ctx.rev())
310 if rename:
311 copies.append((fn, rename))
312 return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
308 return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
313 copies)
309 copies)
314
310
315 # showfilecopiesswitch() displays file copies only if copy records are
311 # showfilecopiesswitch() displays file copies only if copy records are
316 # provided before calling the templater, usually with a --copies
312 # provided before calling the templater, usually with a --copies
317 # command line switch.
313 # command line switch.
318 @templatekeyword('file_copies_switch', requires={'revcache'})
314 @templatekeyword('file_copies_switch', requires={'revcache'})
319 def showfilecopiesswitch(context, mapping):
315 def showfilecopiesswitch(context, mapping):
320 """List of strings. Like "file_copies" but displayed
316 """List of strings. Like "file_copies" but displayed
321 only if the --copied switch is set.
317 only if the --copied switch is set.
322 """
318 """
323 copies = context.resource(mapping, 'revcache').get('copies') or []
319 copies = context.resource(mapping, 'revcache').get('copies') or []
324 return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
320 return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
325 copies)
321 copies)
326
322
327 @templatekeyword('file_adds', requires={'ctx', 'revcache'})
323 @templatekeyword('file_adds', requires={'ctx', 'revcache'})
328 def showfileadds(context, mapping):
324 def showfileadds(context, mapping):
329 """List of strings. Files added by this changeset."""
325 """List of strings. Files added by this changeset."""
330 ctx = context.resource(mapping, 'ctx')
326 ctx = context.resource(mapping, 'ctx')
331 return templateutil.compatfileslist(context, mapping, 'file_add',
327 return templateutil.compatfileslist(context, mapping, 'file_add',
332 ctx.filesadded())
328 ctx.filesadded())
333
329
334 @templatekeyword('file_dels', requires={'ctx', 'revcache'})
330 @templatekeyword('file_dels', requires={'ctx', 'revcache'})
335 def showfiledels(context, mapping):
331 def showfiledels(context, mapping):
336 """List of strings. Files removed by this changeset."""
332 """List of strings. Files removed by this changeset."""
337 ctx = context.resource(mapping, 'ctx')
333 ctx = context.resource(mapping, 'ctx')
338 return templateutil.compatfileslist(context, mapping, 'file_del',
334 return templateutil.compatfileslist(context, mapping, 'file_del',
339 ctx.filesremoved())
335 ctx.filesremoved())
340
336
341 @templatekeyword('file_mods', requires={'ctx', 'revcache'})
337 @templatekeyword('file_mods', requires={'ctx', 'revcache'})
342 def showfilemods(context, mapping):
338 def showfilemods(context, mapping):
343 """List of strings. Files modified by this changeset."""
339 """List of strings. Files modified by this changeset."""
344 ctx = context.resource(mapping, 'ctx')
340 ctx = context.resource(mapping, 'ctx')
345 return templateutil.compatfileslist(context, mapping, 'file_mod',
341 return templateutil.compatfileslist(context, mapping, 'file_mod',
346 ctx.filesmodified())
342 ctx.filesmodified())
347
343
348 @templatekeyword('files', requires={'ctx'})
344 @templatekeyword('files', requires={'ctx'})
349 def showfiles(context, mapping):
345 def showfiles(context, mapping):
350 """List of strings. All files modified, added, or removed by this
346 """List of strings. All files modified, added, or removed by this
351 changeset.
347 changeset.
352 """
348 """
353 ctx = context.resource(mapping, 'ctx')
349 ctx = context.resource(mapping, 'ctx')
354 return templateutil.compatfileslist(context, mapping, 'file', ctx.files())
350 return templateutil.compatfileslist(context, mapping, 'file', ctx.files())
355
351
356 @templatekeyword('graphnode', requires={'repo', 'ctx'})
352 @templatekeyword('graphnode', requires={'repo', 'ctx'})
357 def showgraphnode(context, mapping):
353 def showgraphnode(context, mapping):
358 """String. The character representing the changeset node in an ASCII
354 """String. The character representing the changeset node in an ASCII
359 revision graph."""
355 revision graph."""
360 repo = context.resource(mapping, 'repo')
356 repo = context.resource(mapping, 'repo')
361 ctx = context.resource(mapping, 'ctx')
357 ctx = context.resource(mapping, 'ctx')
362 return getgraphnode(repo, ctx)
358 return getgraphnode(repo, ctx)
363
359
364 def getgraphnode(repo, ctx):
360 def getgraphnode(repo, ctx):
365 return getgraphnodecurrent(repo, ctx) or getgraphnodesymbol(ctx)
361 return getgraphnodecurrent(repo, ctx) or getgraphnodesymbol(ctx)
366
362
367 def getgraphnodecurrent(repo, ctx):
363 def getgraphnodecurrent(repo, ctx):
368 wpnodes = repo.dirstate.parents()
364 wpnodes = repo.dirstate.parents()
369 if wpnodes[1] == nullid:
365 if wpnodes[1] == nullid:
370 wpnodes = wpnodes[:1]
366 wpnodes = wpnodes[:1]
371 if ctx.node() in wpnodes:
367 if ctx.node() in wpnodes:
372 return '@'
368 return '@'
373 else:
369 else:
374 return ''
370 return ''
375
371
376 def getgraphnodesymbol(ctx):
372 def getgraphnodesymbol(ctx):
377 if ctx.obsolete():
373 if ctx.obsolete():
378 return 'x'
374 return 'x'
379 elif ctx.isunstable():
375 elif ctx.isunstable():
380 return '*'
376 return '*'
381 elif ctx.closesbranch():
377 elif ctx.closesbranch():
382 return '_'
378 return '_'
383 else:
379 else:
384 return 'o'
380 return 'o'
385
381
386 @templatekeyword('graphwidth', requires=())
382 @templatekeyword('graphwidth', requires=())
387 def showgraphwidth(context, mapping):
383 def showgraphwidth(context, mapping):
388 """Integer. The width of the graph drawn by 'log --graph' or zero."""
384 """Integer. The width of the graph drawn by 'log --graph' or zero."""
389 # just hosts documentation; should be overridden by template mapping
385 # just hosts documentation; should be overridden by template mapping
390 return 0
386 return 0
391
387
392 @templatekeyword('index', requires=())
388 @templatekeyword('index', requires=())
393 def showindex(context, mapping):
389 def showindex(context, mapping):
394 """Integer. The current iteration of the loop. (0 indexed)"""
390 """Integer. The current iteration of the loop. (0 indexed)"""
395 # just hosts documentation; should be overridden by template mapping
391 # just hosts documentation; should be overridden by template mapping
396 raise error.Abort(_("can't use index in this context"))
392 raise error.Abort(_("can't use index in this context"))
397
393
398 @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache'})
394 @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache'})
399 def showlatesttag(context, mapping):
395 def showlatesttag(context, mapping):
400 """List of strings. The global tags on the most recent globally
396 """List of strings. The global tags on the most recent globally
401 tagged ancestor of this changeset. If no such tags exist, the list
397 tagged ancestor of this changeset. If no such tags exist, the list
402 consists of the single string "null".
398 consists of the single string "null".
403 """
399 """
404 return showlatesttags(context, mapping, None)
400 return showlatesttags(context, mapping, None)
405
401
406 def showlatesttags(context, mapping, pattern):
402 def showlatesttags(context, mapping, pattern):
407 """helper method for the latesttag keyword and function"""
403 """helper method for the latesttag keyword and function"""
408 latesttags = getlatesttags(context, mapping, pattern)
404 latesttags = getlatesttags(context, mapping, pattern)
409
405
410 # latesttag[0] is an implementation detail for sorting csets on different
406 # latesttag[0] is an implementation detail for sorting csets on different
411 # branches in a stable manner- it is the date the tagged cset was created,
407 # branches in a stable manner- it is the date the tagged cset was created,
412 # not the date the tag was created. Therefore it isn't made visible here.
408 # not the date the tag was created. Therefore it isn't made visible here.
413 makemap = lambda v: {
409 makemap = lambda v: {
414 'changes': _showchangessincetag,
410 'changes': _showchangessincetag,
415 'distance': latesttags[1],
411 'distance': latesttags[1],
416 'latesttag': v, # BC with {latesttag % '{latesttag}'}
412 'latesttag': v, # BC with {latesttag % '{latesttag}'}
417 'tag': v
413 'tag': v
418 }
414 }
419
415
420 tags = latesttags[2]
416 tags = latesttags[2]
421 f = _showcompatlist(context, mapping, 'latesttag', tags, separator=':')
417 f = _showcompatlist(context, mapping, 'latesttag', tags, separator=':')
422 return _hybrid(f, tags, makemap, pycompat.identity)
418 return _hybrid(f, tags, makemap, pycompat.identity)
423
419
424 @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'})
420 @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'})
425 def showlatesttagdistance(context, mapping):
421 def showlatesttagdistance(context, mapping):
426 """Integer. Longest path to the latest tag."""
422 """Integer. Longest path to the latest tag."""
427 return getlatesttags(context, mapping)[1]
423 return getlatesttags(context, mapping)[1]
428
424
429 @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'})
425 @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'})
430 def showchangessincelatesttag(context, mapping):
426 def showchangessincelatesttag(context, mapping):
431 """Integer. All ancestors not in the latest tag."""
427 """Integer. All ancestors not in the latest tag."""
432 tag = getlatesttags(context, mapping)[2][0]
428 tag = getlatesttags(context, mapping)[2][0]
433 mapping = context.overlaymap(mapping, {'tag': tag})
429 mapping = context.overlaymap(mapping, {'tag': tag})
434 return _showchangessincetag(context, mapping)
430 return _showchangessincetag(context, mapping)
435
431
436 def _showchangessincetag(context, mapping):
432 def _showchangessincetag(context, mapping):
437 repo = context.resource(mapping, 'repo')
433 repo = context.resource(mapping, 'repo')
438 ctx = context.resource(mapping, 'ctx')
434 ctx = context.resource(mapping, 'ctx')
439 offset = 0
435 offset = 0
440 revs = [ctx.rev()]
436 revs = [ctx.rev()]
441 tag = context.symbol(mapping, 'tag')
437 tag = context.symbol(mapping, 'tag')
442
438
443 # The only() revset doesn't currently support wdir()
439 # The only() revset doesn't currently support wdir()
444 if ctx.rev() is None:
440 if ctx.rev() is None:
445 offset = 1
441 offset = 1
446 revs = [p.rev() for p in ctx.parents()]
442 revs = [p.rev() for p in ctx.parents()]
447
443
448 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
444 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
449
445
450 # teach templater latesttags.changes is switched to (context, mapping) API
446 # teach templater latesttags.changes is switched to (context, mapping) API
451 _showchangessincetag._requires = {'repo', 'ctx'}
447 _showchangessincetag._requires = {'repo', 'ctx'}
452
448
453 @templatekeyword('manifest', requires={'repo', 'ctx'})
449 @templatekeyword('manifest', requires={'repo', 'ctx'})
454 def showmanifest(context, mapping):
450 def showmanifest(context, mapping):
455 repo = context.resource(mapping, 'repo')
451 repo = context.resource(mapping, 'repo')
456 ctx = context.resource(mapping, 'ctx')
452 ctx = context.resource(mapping, 'ctx')
457 mnode = ctx.manifestnode()
453 mnode = ctx.manifestnode()
458 if mnode is None:
454 if mnode is None:
459 mnode = wdirid
455 mnode = wdirid
460 mrev = wdirrev
456 mrev = wdirrev
461 else:
457 else:
462 mrev = repo.manifestlog.rev(mnode)
458 mrev = repo.manifestlog.rev(mnode)
463 mhex = hex(mnode)
459 mhex = hex(mnode)
464 mapping = context.overlaymap(mapping, {'rev': mrev, 'node': mhex})
460 mapping = context.overlaymap(mapping, {'rev': mrev, 'node': mhex})
465 f = context.process('manifest', mapping)
461 f = context.process('manifest', mapping)
466 return templateutil.hybriditem(f, None, f,
462 return templateutil.hybriditem(f, None, f,
467 lambda x: {'rev': mrev, 'node': mhex})
463 lambda x: {'rev': mrev, 'node': mhex})
468
464
469 @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx'})
465 @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx'})
470 def showobsfate(context, mapping):
466 def showobsfate(context, mapping):
471 # this function returns a list containing pre-formatted obsfate strings.
467 # this function returns a list containing pre-formatted obsfate strings.
472 #
468 #
473 # This function will be replaced by templates fragments when we will have
469 # This function will be replaced by templates fragments when we will have
474 # the verbosity templatekw available.
470 # the verbosity templatekw available.
475 succsandmarkers = showsuccsandmarkers(context, mapping)
471 succsandmarkers = showsuccsandmarkers(context, mapping)
476
472
477 ui = context.resource(mapping, 'ui')
473 ui = context.resource(mapping, 'ui')
478 repo = context.resource(mapping, 'repo')
474 repo = context.resource(mapping, 'repo')
479 values = []
475 values = []
480
476
481 for x in succsandmarkers.tovalue(context, mapping):
477 for x in succsandmarkers.tovalue(context, mapping):
482 v = obsutil.obsfateprinter(ui, repo, x['successors'], x['markers'],
478 v = obsutil.obsfateprinter(ui, repo, x['successors'], x['markers'],
483 scmutil.formatchangeid)
479 scmutil.formatchangeid)
484 values.append(v)
480 values.append(v)
485
481
486 return compatlist(context, mapping, "fate", values)
482 return compatlist(context, mapping, "fate", values)
487
483
488 def shownames(context, mapping, namespace):
484 def shownames(context, mapping, namespace):
489 """helper method to generate a template keyword for a namespace"""
485 """helper method to generate a template keyword for a namespace"""
490 repo = context.resource(mapping, 'repo')
486 repo = context.resource(mapping, 'repo')
491 ctx = context.resource(mapping, 'ctx')
487 ctx = context.resource(mapping, 'ctx')
492 ns = repo.names[namespace]
488 ns = repo.names[namespace]
493 names = ns.names(repo, ctx.node())
489 names = ns.names(repo, ctx.node())
494 return compatlist(context, mapping, ns.templatename, names,
490 return compatlist(context, mapping, ns.templatename, names,
495 plural=namespace)
491 plural=namespace)
496
492
497 @templatekeyword('namespaces', requires={'repo', 'ctx'})
493 @templatekeyword('namespaces', requires={'repo', 'ctx'})
498 def shownamespaces(context, mapping):
494 def shownamespaces(context, mapping):
499 """Dict of lists. Names attached to this changeset per
495 """Dict of lists. Names attached to this changeset per
500 namespace."""
496 namespace."""
501 repo = context.resource(mapping, 'repo')
497 repo = context.resource(mapping, 'repo')
502 ctx = context.resource(mapping, 'ctx')
498 ctx = context.resource(mapping, 'ctx')
503
499
504 namespaces = util.sortdict()
500 namespaces = util.sortdict()
505 def makensmapfn(ns):
501 def makensmapfn(ns):
506 # 'name' for iterating over namespaces, templatename for local reference
502 # 'name' for iterating over namespaces, templatename for local reference
507 return lambda v: {'name': v, ns.templatename: v}
503 return lambda v: {'name': v, ns.templatename: v}
508
504
509 for k, ns in repo.names.iteritems():
505 for k, ns in repo.names.iteritems():
510 names = ns.names(repo, ctx.node())
506 names = ns.names(repo, ctx.node())
511 f = _showcompatlist(context, mapping, 'name', names)
507 f = _showcompatlist(context, mapping, 'name', names)
512 namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity)
508 namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity)
513
509
514 f = _showcompatlist(context, mapping, 'namespace', list(namespaces))
510 f = _showcompatlist(context, mapping, 'namespace', list(namespaces))
515
511
516 def makemap(ns):
512 def makemap(ns):
517 return {
513 return {
518 'namespace': ns,
514 'namespace': ns,
519 'names': namespaces[ns],
515 'names': namespaces[ns],
520 'builtin': repo.names[ns].builtin,
516 'builtin': repo.names[ns].builtin,
521 'colorname': repo.names[ns].colorname,
517 'colorname': repo.names[ns].colorname,
522 }
518 }
523
519
524 return _hybrid(f, namespaces, makemap, pycompat.identity)
520 return _hybrid(f, namespaces, makemap, pycompat.identity)
525
521
526 @templatekeyword('negrev', requires={'repo', 'ctx'})
522 @templatekeyword('negrev', requires={'repo', 'ctx'})
527 def shownegrev(context, mapping):
523 def shownegrev(context, mapping):
528 """Integer. The repository-local changeset negative revision number,
524 """Integer. The repository-local changeset negative revision number,
529 which counts in the opposite direction."""
525 which counts in the opposite direction."""
530 ctx = context.resource(mapping, 'ctx')
526 ctx = context.resource(mapping, 'ctx')
531 rev = ctx.rev()
527 rev = ctx.rev()
532 if rev is None or rev < 0: # wdir() or nullrev?
528 if rev is None or rev < 0: # wdir() or nullrev?
533 return None
529 return None
534 repo = context.resource(mapping, 'repo')
530 repo = context.resource(mapping, 'repo')
535 return rev - len(repo)
531 return rev - len(repo)
536
532
537 @templatekeyword('node', requires={'ctx'})
533 @templatekeyword('node', requires={'ctx'})
538 def shownode(context, mapping):
534 def shownode(context, mapping):
539 """String. The changeset identification hash, as a 40 hexadecimal
535 """String. The changeset identification hash, as a 40 hexadecimal
540 digit string.
536 digit string.
541 """
537 """
542 ctx = context.resource(mapping, 'ctx')
538 ctx = context.resource(mapping, 'ctx')
543 return ctx.hex()
539 return ctx.hex()
544
540
545 @templatekeyword('obsolete', requires={'ctx'})
541 @templatekeyword('obsolete', requires={'ctx'})
546 def showobsolete(context, mapping):
542 def showobsolete(context, mapping):
547 """String. Whether the changeset is obsolete. (EXPERIMENTAL)"""
543 """String. Whether the changeset is obsolete. (EXPERIMENTAL)"""
548 ctx = context.resource(mapping, 'ctx')
544 ctx = context.resource(mapping, 'ctx')
549 if ctx.obsolete():
545 if ctx.obsolete():
550 return 'obsolete'
546 return 'obsolete'
551 return ''
547 return ''
552
548
553 @templatekeyword('path', requires={'fctx'})
549 @templatekeyword('path', requires={'fctx'})
554 def showpath(context, mapping):
550 def showpath(context, mapping):
555 """String. Repository-absolute path of the current file. (EXPERIMENTAL)"""
551 """String. Repository-absolute path of the current file. (EXPERIMENTAL)"""
556 fctx = context.resource(mapping, 'fctx')
552 fctx = context.resource(mapping, 'fctx')
557 return fctx.path()
553 return fctx.path()
558
554
559 @templatekeyword('peerurls', requires={'repo'})
555 @templatekeyword('peerurls', requires={'repo'})
560 def showpeerurls(context, mapping):
556 def showpeerurls(context, mapping):
561 """A dictionary of repository locations defined in the [paths] section
557 """A dictionary of repository locations defined in the [paths] section
562 of your configuration file."""
558 of your configuration file."""
563 repo = context.resource(mapping, 'repo')
559 repo = context.resource(mapping, 'repo')
564 # see commands.paths() for naming of dictionary keys
560 # see commands.paths() for naming of dictionary keys
565 paths = repo.ui.paths
561 paths = repo.ui.paths
566 urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems()))
562 urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems()))
567 def makemap(k):
563 def makemap(k):
568 p = paths[k]
564 p = paths[k]
569 d = {'name': k, 'url': p.rawloc}
565 d = {'name': k, 'url': p.rawloc}
570 d.update((o, v) for o, v in sorted(p.suboptions.iteritems()))
566 d.update((o, v) for o, v in sorted(p.suboptions.iteritems()))
571 return d
567 return d
572 return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k]))
568 return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k]))
573
569
574 @templatekeyword("predecessors", requires={'repo', 'ctx'})
570 @templatekeyword("predecessors", requires={'repo', 'ctx'})
575 def showpredecessors(context, mapping):
571 def showpredecessors(context, mapping):
576 """Returns the list of the closest visible predecessors. (EXPERIMENTAL)"""
572 """Returns the list of the closest visible predecessors. (EXPERIMENTAL)"""
577 repo = context.resource(mapping, 'repo')
573 repo = context.resource(mapping, 'repo')
578 ctx = context.resource(mapping, 'ctx')
574 ctx = context.resource(mapping, 'ctx')
579 predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
575 predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
580 predecessors = pycompat.maplist(hex, predecessors)
576 predecessors = pycompat.maplist(hex, predecessors)
581
577
582 return _hybrid(None, predecessors,
578 return _hybrid(None, predecessors,
583 lambda x: {'ctx': repo[x]},
579 lambda x: {'ctx': repo[x]},
584 lambda x: scmutil.formatchangeid(repo[x]))
580 lambda x: scmutil.formatchangeid(repo[x]))
585
581
586 @templatekeyword('reporoot', requires={'repo'})
582 @templatekeyword('reporoot', requires={'repo'})
587 def showreporoot(context, mapping):
583 def showreporoot(context, mapping):
588 """String. The root directory of the current repository."""
584 """String. The root directory of the current repository."""
589 repo = context.resource(mapping, 'repo')
585 repo = context.resource(mapping, 'repo')
590 return repo.root
586 return repo.root
591
587
592 @templatekeyword('size', requires={'fctx'})
588 @templatekeyword('size', requires={'fctx'})
593 def showsize(context, mapping):
589 def showsize(context, mapping):
594 """Integer. Size of the current file in bytes. (EXPERIMENTAL)"""
590 """Integer. Size of the current file in bytes. (EXPERIMENTAL)"""
595 fctx = context.resource(mapping, 'fctx')
591 fctx = context.resource(mapping, 'fctx')
596 return fctx.size()
592 return fctx.size()
597
593
598 # requires 'fctx' to denote {status} depends on (ctx, path) pair
594 # requires 'fctx' to denote {status} depends on (ctx, path) pair
599 @templatekeyword('status', requires={'ctx', 'fctx', 'revcache'})
595 @templatekeyword('status', requires={'ctx', 'fctx', 'revcache'})
600 def showstatus(context, mapping):
596 def showstatus(context, mapping):
601 """String. Status code of the current file. (EXPERIMENTAL)"""
597 """String. Status code of the current file. (EXPERIMENTAL)"""
602 path = templateutil.runsymbol(context, mapping, 'path')
598 path = templateutil.runsymbol(context, mapping, 'path')
603 path = templateutil.stringify(context, mapping, path)
599 path = templateutil.stringify(context, mapping, path)
604 if not path:
600 if not path:
605 return
601 return
606 statmap = _getfilestatusmap(context, mapping)
602 statmap = _getfilestatusmap(context, mapping)
607 if path not in statmap:
603 if path not in statmap:
608 statmap = _getfilestatusmap(context, mapping, listall=True)
604 statmap = _getfilestatusmap(context, mapping, listall=True)
609 return statmap.get(path)
605 return statmap.get(path)
610
606
611 @templatekeyword("successorssets", requires={'repo', 'ctx'})
607 @templatekeyword("successorssets", requires={'repo', 'ctx'})
612 def showsuccessorssets(context, mapping):
608 def showsuccessorssets(context, mapping):
613 """Returns a string of sets of successors for a changectx. Format used
609 """Returns a string of sets of successors for a changectx. Format used
614 is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2
610 is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2
615 while also diverged into ctx3. (EXPERIMENTAL)"""
611 while also diverged into ctx3. (EXPERIMENTAL)"""
616 repo = context.resource(mapping, 'repo')
612 repo = context.resource(mapping, 'repo')
617 ctx = context.resource(mapping, 'ctx')
613 ctx = context.resource(mapping, 'ctx')
618 if not ctx.obsolete():
614 if not ctx.obsolete():
619 return ''
615 return ''
620
616
621 ssets = obsutil.successorssets(repo, ctx.node(), closest=True)
617 ssets = obsutil.successorssets(repo, ctx.node(), closest=True)
622 ssets = [[hex(n) for n in ss] for ss in ssets]
618 ssets = [[hex(n) for n in ss] for ss in ssets]
623
619
624 data = []
620 data = []
625 for ss in ssets:
621 for ss in ssets:
626 h = _hybrid(None, ss, lambda x: {'ctx': repo[x]},
622 h = _hybrid(None, ss, lambda x: {'ctx': repo[x]},
627 lambda x: scmutil.formatchangeid(repo[x]))
623 lambda x: scmutil.formatchangeid(repo[x]))
628 data.append(h)
624 data.append(h)
629
625
630 # Format the successorssets
626 # Format the successorssets
631 def render(d):
627 def render(d):
632 return templateutil.stringify(context, mapping, d)
628 return templateutil.stringify(context, mapping, d)
633
629
634 def gen(data):
630 def gen(data):
635 yield "; ".join(render(d) for d in data)
631 yield "; ".join(render(d) for d in data)
636
632
637 return _hybrid(gen(data), data, lambda x: {'successorset': x},
633 return _hybrid(gen(data), data, lambda x: {'successorset': x},
638 pycompat.identity)
634 pycompat.identity)
639
635
640 @templatekeyword("succsandmarkers", requires={'repo', 'ctx'})
636 @templatekeyword("succsandmarkers", requires={'repo', 'ctx'})
641 def showsuccsandmarkers(context, mapping):
637 def showsuccsandmarkers(context, mapping):
642 """Returns a list of dict for each final successor of ctx. The dict
638 """Returns a list of dict for each final successor of ctx. The dict
643 contains successors node id in "successors" keys and the list of
639 contains successors node id in "successors" keys and the list of
644 obs-markers from ctx to the set of successors in "markers".
640 obs-markers from ctx to the set of successors in "markers".
645 (EXPERIMENTAL)
641 (EXPERIMENTAL)
646 """
642 """
647 repo = context.resource(mapping, 'repo')
643 repo = context.resource(mapping, 'repo')
648 ctx = context.resource(mapping, 'ctx')
644 ctx = context.resource(mapping, 'ctx')
649
645
650 values = obsutil.successorsandmarkers(repo, ctx)
646 values = obsutil.successorsandmarkers(repo, ctx)
651
647
652 if values is None:
648 if values is None:
653 values = []
649 values = []
654
650
655 # Format successors and markers to avoid exposing binary to templates
651 # Format successors and markers to avoid exposing binary to templates
656 data = []
652 data = []
657 for i in values:
653 for i in values:
658 # Format successors
654 # Format successors
659 successors = i['successors']
655 successors = i['successors']
660
656
661 successors = [hex(n) for n in successors]
657 successors = [hex(n) for n in successors]
662 successors = _hybrid(None, successors,
658 successors = _hybrid(None, successors,
663 lambda x: {'ctx': repo[x]},
659 lambda x: {'ctx': repo[x]},
664 lambda x: scmutil.formatchangeid(repo[x]))
660 lambda x: scmutil.formatchangeid(repo[x]))
665
661
666 # Format markers
662 # Format markers
667 finalmarkers = []
663 finalmarkers = []
668 for m in i['markers']:
664 for m in i['markers']:
669 hexprec = hex(m[0])
665 hexprec = hex(m[0])
670 hexsucs = tuple(hex(n) for n in m[1])
666 hexsucs = tuple(hex(n) for n in m[1])
671 hexparents = None
667 hexparents = None
672 if m[5] is not None:
668 if m[5] is not None:
673 hexparents = tuple(hex(n) for n in m[5])
669 hexparents = tuple(hex(n) for n in m[5])
674 newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:]
670 newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:]
675 finalmarkers.append(newmarker)
671 finalmarkers.append(newmarker)
676
672
677 data.append({'successors': successors, 'markers': finalmarkers})
673 data.append({'successors': successors, 'markers': finalmarkers})
678
674
679 return templateutil.mappinglist(data)
675 return templateutil.mappinglist(data)
680
676
681 @templatekeyword('p1', requires={'ctx'})
677 @templatekeyword('p1', requires={'ctx'})
682 def showp1(context, mapping):
678 def showp1(context, mapping):
683 """Changeset. The changeset's first parent. ``{p1.rev}`` for the revision
679 """Changeset. The changeset's first parent. ``{p1.rev}`` for the revision
684 number, and ``{p1.node}`` for the identification hash."""
680 number, and ``{p1.node}`` for the identification hash."""
685 ctx = context.resource(mapping, 'ctx')
681 ctx = context.resource(mapping, 'ctx')
686 return templateutil.mappingdict({'ctx': ctx.p1()}, tmpl=_changeidtmpl)
682 return templateutil.mappingdict({'ctx': ctx.p1()}, tmpl=_changeidtmpl)
687
683
688 @templatekeyword('p2', requires={'ctx'})
684 @templatekeyword('p2', requires={'ctx'})
689 def showp2(context, mapping):
685 def showp2(context, mapping):
690 """Changeset. The changeset's second parent. ``{p2.rev}`` for the revision
686 """Changeset. The changeset's second parent. ``{p2.rev}`` for the revision
691 number, and ``{p2.node}`` for the identification hash."""
687 number, and ``{p2.node}`` for the identification hash."""
692 ctx = context.resource(mapping, 'ctx')
688 ctx = context.resource(mapping, 'ctx')
693 return templateutil.mappingdict({'ctx': ctx.p2()}, tmpl=_changeidtmpl)
689 return templateutil.mappingdict({'ctx': ctx.p2()}, tmpl=_changeidtmpl)
694
690
695 @templatekeyword('p1rev', requires={'ctx'})
691 @templatekeyword('p1rev', requires={'ctx'})
696 def showp1rev(context, mapping):
692 def showp1rev(context, mapping):
697 """Integer. The repository-local revision number of the changeset's
693 """Integer. The repository-local revision number of the changeset's
698 first parent, or -1 if the changeset has no parents. (DEPRECATED)"""
694 first parent, or -1 if the changeset has no parents. (DEPRECATED)"""
699 ctx = context.resource(mapping, 'ctx')
695 ctx = context.resource(mapping, 'ctx')
700 return ctx.p1().rev()
696 return ctx.p1().rev()
701
697
702 @templatekeyword('p2rev', requires={'ctx'})
698 @templatekeyword('p2rev', requires={'ctx'})
703 def showp2rev(context, mapping):
699 def showp2rev(context, mapping):
704 """Integer. The repository-local revision number of the changeset's
700 """Integer. The repository-local revision number of the changeset's
705 second parent, or -1 if the changeset has no second parent. (DEPRECATED)"""
701 second parent, or -1 if the changeset has no second parent. (DEPRECATED)"""
706 ctx = context.resource(mapping, 'ctx')
702 ctx = context.resource(mapping, 'ctx')
707 return ctx.p2().rev()
703 return ctx.p2().rev()
708
704
709 @templatekeyword('p1node', requires={'ctx'})
705 @templatekeyword('p1node', requires={'ctx'})
710 def showp1node(context, mapping):
706 def showp1node(context, mapping):
711 """String. The identification hash of the changeset's first parent,
707 """String. The identification hash of the changeset's first parent,
712 as a 40 digit hexadecimal string. If the changeset has no parents, all
708 as a 40 digit hexadecimal string. If the changeset has no parents, all
713 digits are 0. (DEPRECATED)"""
709 digits are 0. (DEPRECATED)"""
714 ctx = context.resource(mapping, 'ctx')
710 ctx = context.resource(mapping, 'ctx')
715 return ctx.p1().hex()
711 return ctx.p1().hex()
716
712
717 @templatekeyword('p2node', requires={'ctx'})
713 @templatekeyword('p2node', requires={'ctx'})
718 def showp2node(context, mapping):
714 def showp2node(context, mapping):
719 """String. The identification hash of the changeset's second
715 """String. The identification hash of the changeset's second
720 parent, as a 40 digit hexadecimal string. If the changeset has no second
716 parent, as a 40 digit hexadecimal string. If the changeset has no second
721 parent, all digits are 0. (DEPRECATED)"""
717 parent, all digits are 0. (DEPRECATED)"""
722 ctx = context.resource(mapping, 'ctx')
718 ctx = context.resource(mapping, 'ctx')
723 return ctx.p2().hex()
719 return ctx.p2().hex()
724
720
725 @templatekeyword('parents', requires={'repo', 'ctx'})
721 @templatekeyword('parents', requires={'repo', 'ctx'})
726 def showparents(context, mapping):
722 def showparents(context, mapping):
727 """List of strings. The parents of the changeset in "rev:node"
723 """List of strings. The parents of the changeset in "rev:node"
728 format. If the changeset has only one "natural" parent (the predecessor
724 format. If the changeset has only one "natural" parent (the predecessor
729 revision) nothing is shown."""
725 revision) nothing is shown."""
730 repo = context.resource(mapping, 'repo')
726 repo = context.resource(mapping, 'repo')
731 ctx = context.resource(mapping, 'ctx')
727 ctx = context.resource(mapping, 'ctx')
732 pctxs = scmutil.meaningfulparents(repo, ctx)
728 pctxs = scmutil.meaningfulparents(repo, ctx)
733 prevs = [p.rev() for p in pctxs]
729 prevs = [p.rev() for p in pctxs]
734 parents = [[('rev', p.rev()),
730 parents = [[('rev', p.rev()),
735 ('node', p.hex()),
731 ('node', p.hex()),
736 ('phase', p.phasestr())]
732 ('phase', p.phasestr())]
737 for p in pctxs]
733 for p in pctxs]
738 f = _showcompatlist(context, mapping, 'parent', parents)
734 f = _showcompatlist(context, mapping, 'parent', parents)
739 return _hybrid(f, prevs, lambda x: {'ctx': repo[x]},
735 return _hybrid(f, prevs, lambda x: {'ctx': repo[x]},
740 lambda x: scmutil.formatchangeid(repo[x]), keytype=int)
736 lambda x: scmutil.formatchangeid(repo[x]), keytype=int)
741
737
742 @templatekeyword('phase', requires={'ctx'})
738 @templatekeyword('phase', requires={'ctx'})
743 def showphase(context, mapping):
739 def showphase(context, mapping):
744 """String. The changeset phase name."""
740 """String. The changeset phase name."""
745 ctx = context.resource(mapping, 'ctx')
741 ctx = context.resource(mapping, 'ctx')
746 return ctx.phasestr()
742 return ctx.phasestr()
747
743
748 @templatekeyword('phaseidx', requires={'ctx'})
744 @templatekeyword('phaseidx', requires={'ctx'})
749 def showphaseidx(context, mapping):
745 def showphaseidx(context, mapping):
750 """Integer. The changeset phase index. (ADVANCED)"""
746 """Integer. The changeset phase index. (ADVANCED)"""
751 ctx = context.resource(mapping, 'ctx')
747 ctx = context.resource(mapping, 'ctx')
752 return ctx.phase()
748 return ctx.phase()
753
749
754 @templatekeyword('rev', requires={'ctx'})
750 @templatekeyword('rev', requires={'ctx'})
755 def showrev(context, mapping):
751 def showrev(context, mapping):
756 """Integer. The repository-local changeset revision number."""
752 """Integer. The repository-local changeset revision number."""
757 ctx = context.resource(mapping, 'ctx')
753 ctx = context.resource(mapping, 'ctx')
758 return scmutil.intrev(ctx)
754 return scmutil.intrev(ctx)
759
755
760 def showrevslist(context, mapping, name, revs):
756 def showrevslist(context, mapping, name, revs):
761 """helper to generate a list of revisions in which a mapped template will
757 """helper to generate a list of revisions in which a mapped template will
762 be evaluated"""
758 be evaluated"""
763 repo = context.resource(mapping, 'repo')
759 repo = context.resource(mapping, 'repo')
764 # revs may be a smartset; don't compute it until f() has to be evaluated
760 # revs may be a smartset; don't compute it until f() has to be evaluated
765 def f():
761 def f():
766 srevs = ['%d' % r for r in revs]
762 srevs = ['%d' % r for r in revs]
767 return _showcompatlist(context, mapping, name, srevs)
763 return _showcompatlist(context, mapping, name, srevs)
768 return _hybrid(f, revs,
764 return _hybrid(f, revs,
769 lambda x: {name: x, 'ctx': repo[x]},
765 lambda x: {name: x, 'ctx': repo[x]},
770 pycompat.identity, keytype=int)
766 pycompat.identity, keytype=int)
771
767
772 @templatekeyword('subrepos', requires={'ctx'})
768 @templatekeyword('subrepos', requires={'ctx'})
773 def showsubrepos(context, mapping):
769 def showsubrepos(context, mapping):
774 """List of strings. Updated subrepositories in the changeset."""
770 """List of strings. Updated subrepositories in the changeset."""
775 ctx = context.resource(mapping, 'ctx')
771 ctx = context.resource(mapping, 'ctx')
776 substate = ctx.substate
772 substate = ctx.substate
777 if not substate:
773 if not substate:
778 return compatlist(context, mapping, 'subrepo', [])
774 return compatlist(context, mapping, 'subrepo', [])
779 psubstate = ctx.p1().substate or {}
775 psubstate = ctx.p1().substate or {}
780 subrepos = []
776 subrepos = []
781 for sub in substate:
777 for sub in substate:
782 if sub not in psubstate or substate[sub] != psubstate[sub]:
778 if sub not in psubstate or substate[sub] != psubstate[sub]:
783 subrepos.append(sub) # modified or newly added in ctx
779 subrepos.append(sub) # modified or newly added in ctx
784 for sub in psubstate:
780 for sub in psubstate:
785 if sub not in substate:
781 if sub not in substate:
786 subrepos.append(sub) # removed in ctx
782 subrepos.append(sub) # removed in ctx
787 return compatlist(context, mapping, 'subrepo', sorted(subrepos))
783 return compatlist(context, mapping, 'subrepo', sorted(subrepos))
788
784
789 # don't remove "showtags" definition, even though namespaces will put
785 # don't remove "showtags" definition, even though namespaces will put
790 # a helper function for "tags" keyword into "keywords" map automatically,
786 # a helper function for "tags" keyword into "keywords" map automatically,
791 # because online help text is built without namespaces initialization
787 # because online help text is built without namespaces initialization
792 @templatekeyword('tags', requires={'repo', 'ctx'})
788 @templatekeyword('tags', requires={'repo', 'ctx'})
793 def showtags(context, mapping):
789 def showtags(context, mapping):
794 """List of strings. Any tags associated with the changeset."""
790 """List of strings. Any tags associated with the changeset."""
795 return shownames(context, mapping, 'tags')
791 return shownames(context, mapping, 'tags')
796
792
797 @templatekeyword('termwidth', requires={'ui'})
793 @templatekeyword('termwidth', requires={'ui'})
798 def showtermwidth(context, mapping):
794 def showtermwidth(context, mapping):
799 """Integer. The width of the current terminal."""
795 """Integer. The width of the current terminal."""
800 ui = context.resource(mapping, 'ui')
796 ui = context.resource(mapping, 'ui')
801 return ui.termwidth()
797 return ui.termwidth()
802
798
803 @templatekeyword('user', requires={'ctx'})
799 @templatekeyword('user', requires={'ctx'})
804 def showuser(context, mapping):
800 def showuser(context, mapping):
805 """String. The unmodified author of the changeset."""
801 """String. The unmodified author of the changeset."""
806 ctx = context.resource(mapping, 'ctx')
802 ctx = context.resource(mapping, 'ctx')
807 return ctx.user()
803 return ctx.user()
808
804
809 @templatekeyword('instabilities', requires={'ctx'})
805 @templatekeyword('instabilities', requires={'ctx'})
810 def showinstabilities(context, mapping):
806 def showinstabilities(context, mapping):
811 """List of strings. Evolution instabilities affecting the changeset.
807 """List of strings. Evolution instabilities affecting the changeset.
812 (EXPERIMENTAL)
808 (EXPERIMENTAL)
813 """
809 """
814 ctx = context.resource(mapping, 'ctx')
810 ctx = context.resource(mapping, 'ctx')
815 return compatlist(context, mapping, 'instability', ctx.instabilities(),
811 return compatlist(context, mapping, 'instability', ctx.instabilities(),
816 plural='instabilities')
812 plural='instabilities')
817
813
818 @templatekeyword('verbosity', requires={'ui'})
814 @templatekeyword('verbosity', requires={'ui'})
819 def showverbosity(context, mapping):
815 def showverbosity(context, mapping):
820 """String. The current output verbosity in 'debug', 'quiet', 'verbose',
816 """String. The current output verbosity in 'debug', 'quiet', 'verbose',
821 or ''."""
817 or ''."""
822 ui = context.resource(mapping, 'ui')
818 ui = context.resource(mapping, 'ui')
823 # see logcmdutil.changesettemplater for priority of these flags
819 # see logcmdutil.changesettemplater for priority of these flags
824 if ui.debugflag:
820 if ui.debugflag:
825 return 'debug'
821 return 'debug'
826 elif ui.quiet:
822 elif ui.quiet:
827 return 'quiet'
823 return 'quiet'
828 elif ui.verbose:
824 elif ui.verbose:
829 return 'verbose'
825 return 'verbose'
830 return ''
826 return ''
831
827
832 @templatekeyword('whyunstable', requires={'repo', 'ctx'})
828 @templatekeyword('whyunstable', requires={'repo', 'ctx'})
833 def showwhyunstable(context, mapping):
829 def showwhyunstable(context, mapping):
834 """List of dicts explaining all instabilities of a changeset.
830 """List of dicts explaining all instabilities of a changeset.
835 (EXPERIMENTAL)
831 (EXPERIMENTAL)
836 """
832 """
837 repo = context.resource(mapping, 'repo')
833 repo = context.resource(mapping, 'repo')
838 ctx = context.resource(mapping, 'ctx')
834 ctx = context.resource(mapping, 'ctx')
839
835
840 def formatnode(ctx):
836 def formatnode(ctx):
841 return '%s (%s)' % (scmutil.formatchangeid(ctx), ctx.phasestr())
837 return '%s (%s)' % (scmutil.formatchangeid(ctx), ctx.phasestr())
842
838
843 entries = obsutil.whyunstable(repo, ctx)
839 entries = obsutil.whyunstable(repo, ctx)
844
840
845 for entry in entries:
841 for entry in entries:
846 if entry.get('divergentnodes'):
842 if entry.get('divergentnodes'):
847 dnodes = entry['divergentnodes']
843 dnodes = entry['divergentnodes']
848 dnhybrid = _hybrid(None, [dnode.hex() for dnode in dnodes],
844 dnhybrid = _hybrid(None, [dnode.hex() for dnode in dnodes],
849 lambda x: {'ctx': repo[x]},
845 lambda x: {'ctx': repo[x]},
850 lambda x: formatnode(repo[x]))
846 lambda x: formatnode(repo[x]))
851 entry['divergentnodes'] = dnhybrid
847 entry['divergentnodes'] = dnhybrid
852
848
853 tmpl = ('{instability}:{if(divergentnodes, " ")}{divergentnodes} '
849 tmpl = ('{instability}:{if(divergentnodes, " ")}{divergentnodes} '
854 '{reason} {node|short}')
850 '{reason} {node|short}')
855 return templateutil.mappinglist(entries, tmpl=tmpl, sep='\n')
851 return templateutil.mappinglist(entries, tmpl=tmpl, sep='\n')
856
852
857 def loadkeyword(ui, extname, registrarobj):
853 def loadkeyword(ui, extname, registrarobj):
858 """Load template keyword from specified registrarobj
854 """Load template keyword from specified registrarobj
859 """
855 """
860 for name, func in registrarobj._table.iteritems():
856 for name, func in registrarobj._table.iteritems():
861 keywords[name] = func
857 keywords[name] = func
862
858
863 # tell hggettext to extract docstrings from these functions:
859 # tell hggettext to extract docstrings from these functions:
864 i18nfunctions = keywords.values()
860 i18nfunctions = keywords.values()
General Comments 0
You need to be logged in to leave comments. Login now