##// END OF EJS Templates
obsolete: closest divergent support...
Boris Feld -
r33274:68f3e819 default
parent child Browse files
Show More
@@ -1,2245 +1,2248
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 obsutil,
50 obsutil,
51 phases,
51 phases,
52 policy,
52 policy,
53 pvec,
53 pvec,
54 pycompat,
54 pycompat,
55 registrar,
55 registrar,
56 repair,
56 repair,
57 revlog,
57 revlog,
58 revset,
58 revset,
59 revsetlang,
59 revsetlang,
60 scmutil,
60 scmutil,
61 setdiscovery,
61 setdiscovery,
62 simplemerge,
62 simplemerge,
63 smartset,
63 smartset,
64 sslutil,
64 sslutil,
65 streamclone,
65 streamclone,
66 templater,
66 templater,
67 treediscovery,
67 treediscovery,
68 upgrade,
68 upgrade,
69 util,
69 util,
70 vfs as vfsmod,
70 vfs as vfsmod,
71 )
71 )
72
72
73 release = lockmod.release
73 release = lockmod.release
74
74
75 command = registrar.command()
75 command = registrar.command()
76
76
77 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
78 def debugancestor(ui, repo, *args):
78 def debugancestor(ui, repo, *args):
79 """find the ancestor revision of two revisions in a given index"""
79 """find the ancestor revision of two revisions in a given index"""
80 if len(args) == 3:
80 if len(args) == 3:
81 index, rev1, rev2 = args
81 index, rev1, rev2 = args
82 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
83 lookup = r.lookup
83 lookup = r.lookup
84 elif len(args) == 2:
84 elif len(args) == 2:
85 if not repo:
85 if not repo:
86 raise error.Abort(_('there is no Mercurial repository here '
86 raise error.Abort(_('there is no Mercurial repository here '
87 '(.hg not found)'))
87 '(.hg not found)'))
88 rev1, rev2 = args
88 rev1, rev2 = args
89 r = repo.changelog
89 r = repo.changelog
90 lookup = repo.lookup
90 lookup = repo.lookup
91 else:
91 else:
92 raise error.Abort(_('either two or three arguments required'))
92 raise error.Abort(_('either two or three arguments required'))
93 a = r.ancestor(lookup(rev1), lookup(rev2))
93 a = r.ancestor(lookup(rev1), lookup(rev2))
94 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
95
95
96 @command('debugapplystreamclonebundle', [], 'FILE')
96 @command('debugapplystreamclonebundle', [], 'FILE')
97 def debugapplystreamclonebundle(ui, repo, fname):
97 def debugapplystreamclonebundle(ui, repo, fname):
98 """apply a stream clone bundle file"""
98 """apply a stream clone bundle file"""
99 f = hg.openpath(ui, fname)
99 f = hg.openpath(ui, fname)
100 gen = exchange.readbundle(ui, f, fname)
100 gen = exchange.readbundle(ui, f, fname)
101 gen.apply(repo)
101 gen.apply(repo)
102
102
103 @command('debugbuilddag',
103 @command('debugbuilddag',
104 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
105 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
106 ('n', 'new-file', None, _('add new file at each rev'))],
106 ('n', 'new-file', None, _('add new file at each rev'))],
107 _('[OPTION]... [TEXT]'))
107 _('[OPTION]... [TEXT]'))
108 def debugbuilddag(ui, repo, text=None,
108 def debugbuilddag(ui, repo, text=None,
109 mergeable_file=False,
109 mergeable_file=False,
110 overwritten_file=False,
110 overwritten_file=False,
111 new_file=False):
111 new_file=False):
112 """builds a repo with a given DAG from scratch in the current empty repo
112 """builds a repo with a given DAG from scratch in the current empty repo
113
113
114 The description of the DAG is read from stdin if not given on the
114 The description of the DAG is read from stdin if not given on the
115 command line.
115 command line.
116
116
117 Elements:
117 Elements:
118
118
119 - "+n" is a linear run of n nodes based on the current default parent
119 - "+n" is a linear run of n nodes based on the current default parent
120 - "." is a single node based on the current default parent
120 - "." is a single node based on the current default parent
121 - "$" resets the default parent to null (implied at the start);
121 - "$" resets the default parent to null (implied at the start);
122 otherwise the default parent is always the last node created
122 otherwise the default parent is always the last node created
123 - "<p" sets the default parent to the backref p
123 - "<p" sets the default parent to the backref p
124 - "*p" is a fork at parent p, which is a backref
124 - "*p" is a fork at parent p, which is a backref
125 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
126 - "/p2" is a merge of the preceding node and p2
126 - "/p2" is a merge of the preceding node and p2
127 - ":tag" defines a local tag for the preceding node
127 - ":tag" defines a local tag for the preceding node
128 - "@branch" sets the named branch for subsequent nodes
128 - "@branch" sets the named branch for subsequent nodes
129 - "#...\\n" is a comment up to the end of the line
129 - "#...\\n" is a comment up to the end of the line
130
130
131 Whitespace between the above elements is ignored.
131 Whitespace between the above elements is ignored.
132
132
133 A backref is either
133 A backref is either
134
134
135 - a number n, which references the node curr-n, where curr is the current
135 - a number n, which references the node curr-n, where curr is the current
136 node, or
136 node, or
137 - the name of a local tag you placed earlier using ":tag", or
137 - the name of a local tag you placed earlier using ":tag", or
138 - empty to denote the default parent.
138 - empty to denote the default parent.
139
139
140 All string valued-elements are either strictly alphanumeric, or must
140 All string valued-elements are either strictly alphanumeric, or must
141 be enclosed in double quotes ("..."), with "\\" as escape character.
141 be enclosed in double quotes ("..."), with "\\" as escape character.
142 """
142 """
143
143
144 if text is None:
144 if text is None:
145 ui.status(_("reading DAG from stdin\n"))
145 ui.status(_("reading DAG from stdin\n"))
146 text = ui.fin.read()
146 text = ui.fin.read()
147
147
148 cl = repo.changelog
148 cl = repo.changelog
149 if len(cl) > 0:
149 if len(cl) > 0:
150 raise error.Abort(_('repository is not empty'))
150 raise error.Abort(_('repository is not empty'))
151
151
152 # determine number of revs in DAG
152 # determine number of revs in DAG
153 total = 0
153 total = 0
154 for type, data in dagparser.parsedag(text):
154 for type, data in dagparser.parsedag(text):
155 if type == 'n':
155 if type == 'n':
156 total += 1
156 total += 1
157
157
158 if mergeable_file:
158 if mergeable_file:
159 linesperrev = 2
159 linesperrev = 2
160 # make a file with k lines per rev
160 # make a file with k lines per rev
161 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
162 initialmergedlines.append("")
162 initialmergedlines.append("")
163
163
164 tags = []
164 tags = []
165
165
166 wlock = lock = tr = None
166 wlock = lock = tr = None
167 try:
167 try:
168 wlock = repo.wlock()
168 wlock = repo.wlock()
169 lock = repo.lock()
169 lock = repo.lock()
170 tr = repo.transaction("builddag")
170 tr = repo.transaction("builddag")
171
171
172 at = -1
172 at = -1
173 atbranch = 'default'
173 atbranch = 'default'
174 nodeids = []
174 nodeids = []
175 id = 0
175 id = 0
176 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 ui.progress(_('building'), id, unit=_('revisions'), total=total)
177 for type, data in dagparser.parsedag(text):
177 for type, data in dagparser.parsedag(text):
178 if type == 'n':
178 if type == 'n':
179 ui.note(('node %s\n' % str(data)))
179 ui.note(('node %s\n' % str(data)))
180 id, ps = data
180 id, ps = data
181
181
182 files = []
182 files = []
183 fctxs = {}
183 fctxs = {}
184
184
185 p2 = None
185 p2 = None
186 if mergeable_file:
186 if mergeable_file:
187 fn = "mf"
187 fn = "mf"
188 p1 = repo[ps[0]]
188 p1 = repo[ps[0]]
189 if len(ps) > 1:
189 if len(ps) > 1:
190 p2 = repo[ps[1]]
190 p2 = repo[ps[1]]
191 pa = p1.ancestor(p2)
191 pa = p1.ancestor(p2)
192 base, local, other = [x[fn].data() for x in (pa, p1,
192 base, local, other = [x[fn].data() for x in (pa, p1,
193 p2)]
193 p2)]
194 m3 = simplemerge.Merge3Text(base, local, other)
194 m3 = simplemerge.Merge3Text(base, local, other)
195 ml = [l.strip() for l in m3.merge_lines()]
195 ml = [l.strip() for l in m3.merge_lines()]
196 ml.append("")
196 ml.append("")
197 elif at > 0:
197 elif at > 0:
198 ml = p1[fn].data().split("\n")
198 ml = p1[fn].data().split("\n")
199 else:
199 else:
200 ml = initialmergedlines
200 ml = initialmergedlines
201 ml[id * linesperrev] += " r%i" % id
201 ml[id * linesperrev] += " r%i" % id
202 mergedtext = "\n".join(ml)
202 mergedtext = "\n".join(ml)
203 files.append(fn)
203 files.append(fn)
204 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
205
205
206 if overwritten_file:
206 if overwritten_file:
207 fn = "of"
207 fn = "of"
208 files.append(fn)
208 files.append(fn)
209 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
210
210
211 if new_file:
211 if new_file:
212 fn = "nf%i" % id
212 fn = "nf%i" % id
213 files.append(fn)
213 files.append(fn)
214 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
215 if len(ps) > 1:
215 if len(ps) > 1:
216 if not p2:
216 if not p2:
217 p2 = repo[ps[1]]
217 p2 = repo[ps[1]]
218 for fn in p2:
218 for fn in p2:
219 if fn.startswith("nf"):
219 if fn.startswith("nf"):
220 files.append(fn)
220 files.append(fn)
221 fctxs[fn] = p2[fn]
221 fctxs[fn] = p2[fn]
222
222
223 def fctxfn(repo, cx, path):
223 def fctxfn(repo, cx, path):
224 return fctxs.get(path)
224 return fctxs.get(path)
225
225
226 if len(ps) == 0 or ps[0] < 0:
226 if len(ps) == 0 or ps[0] < 0:
227 pars = [None, None]
227 pars = [None, None]
228 elif len(ps) == 1:
228 elif len(ps) == 1:
229 pars = [nodeids[ps[0]], None]
229 pars = [nodeids[ps[0]], None]
230 else:
230 else:
231 pars = [nodeids[p] for p in ps]
231 pars = [nodeids[p] for p in ps]
232 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
233 date=(id, 0),
233 date=(id, 0),
234 user="debugbuilddag",
234 user="debugbuilddag",
235 extra={'branch': atbranch})
235 extra={'branch': atbranch})
236 nodeid = repo.commitctx(cx)
236 nodeid = repo.commitctx(cx)
237 nodeids.append(nodeid)
237 nodeids.append(nodeid)
238 at = id
238 at = id
239 elif type == 'l':
239 elif type == 'l':
240 id, name = data
240 id, name = data
241 ui.note(('tag %s\n' % name))
241 ui.note(('tag %s\n' % name))
242 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
243 elif type == 'a':
243 elif type == 'a':
244 ui.note(('branch %s\n' % data))
244 ui.note(('branch %s\n' % data))
245 atbranch = data
245 atbranch = data
246 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 ui.progress(_('building'), id, unit=_('revisions'), total=total)
247 tr.close()
247 tr.close()
248
248
249 if tags:
249 if tags:
250 repo.vfs.write("localtags", "".join(tags))
250 repo.vfs.write("localtags", "".join(tags))
251 finally:
251 finally:
252 ui.progress(_('building'), None)
252 ui.progress(_('building'), None)
253 release(tr, lock, wlock)
253 release(tr, lock, wlock)
254
254
255 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
256 indent_string = ' ' * indent
256 indent_string = ' ' * indent
257 if all:
257 if all:
258 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
259 % indent_string)
259 % indent_string)
260
260
261 def showchunks(named):
261 def showchunks(named):
262 ui.write("\n%s%s\n" % (indent_string, named))
262 ui.write("\n%s%s\n" % (indent_string, named))
263 chain = None
263 chain = None
264 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
265 node = chunkdata['node']
265 node = chunkdata['node']
266 p1 = chunkdata['p1']
266 p1 = chunkdata['p1']
267 p2 = chunkdata['p2']
267 p2 = chunkdata['p2']
268 cs = chunkdata['cs']
268 cs = chunkdata['cs']
269 deltabase = chunkdata['deltabase']
269 deltabase = chunkdata['deltabase']
270 delta = chunkdata['delta']
270 delta = chunkdata['delta']
271 ui.write("%s%s %s %s %s %s %s\n" %
271 ui.write("%s%s %s %s %s %s %s\n" %
272 (indent_string, hex(node), hex(p1), hex(p2),
272 (indent_string, hex(node), hex(p1), hex(p2),
273 hex(cs), hex(deltabase), len(delta)))
273 hex(cs), hex(deltabase), len(delta)))
274 chain = node
274 chain = node
275
275
276 chunkdata = gen.changelogheader()
276 chunkdata = gen.changelogheader()
277 showchunks("changelog")
277 showchunks("changelog")
278 chunkdata = gen.manifestheader()
278 chunkdata = gen.manifestheader()
279 showchunks("manifest")
279 showchunks("manifest")
280 for chunkdata in iter(gen.filelogheader, {}):
280 for chunkdata in iter(gen.filelogheader, {}):
281 fname = chunkdata['filename']
281 fname = chunkdata['filename']
282 showchunks(fname)
282 showchunks(fname)
283 else:
283 else:
284 if isinstance(gen, bundle2.unbundle20):
284 if isinstance(gen, bundle2.unbundle20):
285 raise error.Abort(_('use debugbundle2 for this file'))
285 raise error.Abort(_('use debugbundle2 for this file'))
286 chunkdata = gen.changelogheader()
286 chunkdata = gen.changelogheader()
287 chain = None
287 chain = None
288 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
289 node = chunkdata['node']
289 node = chunkdata['node']
290 ui.write("%s%s\n" % (indent_string, hex(node)))
290 ui.write("%s%s\n" % (indent_string, hex(node)))
291 chain = node
291 chain = node
292
292
293 def _debugobsmarkers(ui, part, indent=0, **opts):
293 def _debugobsmarkers(ui, part, indent=0, **opts):
294 """display version and markers contained in 'data'"""
294 """display version and markers contained in 'data'"""
295 opts = pycompat.byteskwargs(opts)
295 opts = pycompat.byteskwargs(opts)
296 data = part.read()
296 data = part.read()
297 indent_string = ' ' * indent
297 indent_string = ' ' * indent
298 try:
298 try:
299 version, markers = obsolete._readmarkers(data)
299 version, markers = obsolete._readmarkers(data)
300 except error.UnknownVersion as exc:
300 except error.UnknownVersion as exc:
301 msg = "%sunsupported version: %s (%d bytes)\n"
301 msg = "%sunsupported version: %s (%d bytes)\n"
302 msg %= indent_string, exc.version, len(data)
302 msg %= indent_string, exc.version, len(data)
303 ui.write(msg)
303 ui.write(msg)
304 else:
304 else:
305 msg = "%sversion: %s (%d bytes)\n"
305 msg = "%sversion: %s (%d bytes)\n"
306 msg %= indent_string, version, len(data)
306 msg %= indent_string, version, len(data)
307 ui.write(msg)
307 ui.write(msg)
308 fm = ui.formatter('debugobsolete', opts)
308 fm = ui.formatter('debugobsolete', opts)
309 for rawmarker in sorted(markers):
309 for rawmarker in sorted(markers):
310 m = obsutil.marker(None, rawmarker)
310 m = obsutil.marker(None, rawmarker)
311 fm.startitem()
311 fm.startitem()
312 fm.plain(indent_string)
312 fm.plain(indent_string)
313 cmdutil.showmarker(fm, m)
313 cmdutil.showmarker(fm, m)
314 fm.end()
314 fm.end()
315
315
316 def _debugphaseheads(ui, data, indent=0):
316 def _debugphaseheads(ui, data, indent=0):
317 """display version and markers contained in 'data'"""
317 """display version and markers contained in 'data'"""
318 indent_string = ' ' * indent
318 indent_string = ' ' * indent
319 headsbyphase = bundle2._readphaseheads(data)
319 headsbyphase = bundle2._readphaseheads(data)
320 for phase in phases.allphases:
320 for phase in phases.allphases:
321 for head in headsbyphase[phase]:
321 for head in headsbyphase[phase]:
322 ui.write(indent_string)
322 ui.write(indent_string)
323 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
323 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
324
324
325 def _debugbundle2(ui, gen, all=None, **opts):
325 def _debugbundle2(ui, gen, all=None, **opts):
326 """lists the contents of a bundle2"""
326 """lists the contents of a bundle2"""
327 if not isinstance(gen, bundle2.unbundle20):
327 if not isinstance(gen, bundle2.unbundle20):
328 raise error.Abort(_('not a bundle2 file'))
328 raise error.Abort(_('not a bundle2 file'))
329 ui.write(('Stream params: %s\n' % repr(gen.params)))
329 ui.write(('Stream params: %s\n' % repr(gen.params)))
330 parttypes = opts.get(r'part_type', [])
330 parttypes = opts.get(r'part_type', [])
331 for part in gen.iterparts():
331 for part in gen.iterparts():
332 if parttypes and part.type not in parttypes:
332 if parttypes and part.type not in parttypes:
333 continue
333 continue
334 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
334 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
335 if part.type == 'changegroup':
335 if part.type == 'changegroup':
336 version = part.params.get('version', '01')
336 version = part.params.get('version', '01')
337 cg = changegroup.getunbundler(version, part, 'UN')
337 cg = changegroup.getunbundler(version, part, 'UN')
338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 if part.type == 'obsmarkers':
339 if part.type == 'obsmarkers':
340 _debugobsmarkers(ui, part, indent=4, **opts)
340 _debugobsmarkers(ui, part, indent=4, **opts)
341 if part.type == 'phase-heads':
341 if part.type == 'phase-heads':
342 _debugphaseheads(ui, part, indent=4)
342 _debugphaseheads(ui, part, indent=4)
343
343
344 @command('debugbundle',
344 @command('debugbundle',
345 [('a', 'all', None, _('show all details')),
345 [('a', 'all', None, _('show all details')),
346 ('', 'part-type', [], _('show only the named part type')),
346 ('', 'part-type', [], _('show only the named part type')),
347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 _('FILE'),
348 _('FILE'),
349 norepo=True)
349 norepo=True)
350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 """lists the contents of a bundle"""
351 """lists the contents of a bundle"""
352 with hg.openpath(ui, bundlepath) as f:
352 with hg.openpath(ui, bundlepath) as f:
353 if spec:
353 if spec:
354 spec = exchange.getbundlespec(ui, f)
354 spec = exchange.getbundlespec(ui, f)
355 ui.write('%s\n' % spec)
355 ui.write('%s\n' % spec)
356 return
356 return
357
357
358 gen = exchange.readbundle(ui, f, bundlepath)
358 gen = exchange.readbundle(ui, f, bundlepath)
359 if isinstance(gen, bundle2.unbundle20):
359 if isinstance(gen, bundle2.unbundle20):
360 return _debugbundle2(ui, gen, all=all, **opts)
360 return _debugbundle2(ui, gen, all=all, **opts)
361 _debugchangegroup(ui, gen, all=all, **opts)
361 _debugchangegroup(ui, gen, all=all, **opts)
362
362
363 @command('debugcheckstate', [], '')
363 @command('debugcheckstate', [], '')
364 def debugcheckstate(ui, repo):
364 def debugcheckstate(ui, repo):
365 """validate the correctness of the current dirstate"""
365 """validate the correctness of the current dirstate"""
366 parent1, parent2 = repo.dirstate.parents()
366 parent1, parent2 = repo.dirstate.parents()
367 m1 = repo[parent1].manifest()
367 m1 = repo[parent1].manifest()
368 m2 = repo[parent2].manifest()
368 m2 = repo[parent2].manifest()
369 errors = 0
369 errors = 0
370 for f in repo.dirstate:
370 for f in repo.dirstate:
371 state = repo.dirstate[f]
371 state = repo.dirstate[f]
372 if state in "nr" and f not in m1:
372 if state in "nr" and f not in m1:
373 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
373 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
374 errors += 1
374 errors += 1
375 if state in "a" and f in m1:
375 if state in "a" and f in m1:
376 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
376 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
377 errors += 1
377 errors += 1
378 if state in "m" and f not in m1 and f not in m2:
378 if state in "m" and f not in m1 and f not in m2:
379 ui.warn(_("%s in state %s, but not in either manifest\n") %
379 ui.warn(_("%s in state %s, but not in either manifest\n") %
380 (f, state))
380 (f, state))
381 errors += 1
381 errors += 1
382 for f in m1:
382 for f in m1:
383 state = repo.dirstate[f]
383 state = repo.dirstate[f]
384 if state not in "nrm":
384 if state not in "nrm":
385 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
385 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
386 errors += 1
386 errors += 1
387 if errors:
387 if errors:
388 error = _(".hg/dirstate inconsistent with current parent's manifest")
388 error = _(".hg/dirstate inconsistent with current parent's manifest")
389 raise error.Abort(error)
389 raise error.Abort(error)
390
390
391 @command('debugcolor',
391 @command('debugcolor',
392 [('', 'style', None, _('show all configured styles'))],
392 [('', 'style', None, _('show all configured styles'))],
393 'hg debugcolor')
393 'hg debugcolor')
394 def debugcolor(ui, repo, **opts):
394 def debugcolor(ui, repo, **opts):
395 """show available color, effects or style"""
395 """show available color, effects or style"""
396 ui.write(('color mode: %s\n') % ui._colormode)
396 ui.write(('color mode: %s\n') % ui._colormode)
397 if opts.get(r'style'):
397 if opts.get(r'style'):
398 return _debugdisplaystyle(ui)
398 return _debugdisplaystyle(ui)
399 else:
399 else:
400 return _debugdisplaycolor(ui)
400 return _debugdisplaycolor(ui)
401
401
402 def _debugdisplaycolor(ui):
402 def _debugdisplaycolor(ui):
403 ui = ui.copy()
403 ui = ui.copy()
404 ui._styles.clear()
404 ui._styles.clear()
405 for effect in color._activeeffects(ui).keys():
405 for effect in color._activeeffects(ui).keys():
406 ui._styles[effect] = effect
406 ui._styles[effect] = effect
407 if ui._terminfoparams:
407 if ui._terminfoparams:
408 for k, v in ui.configitems('color'):
408 for k, v in ui.configitems('color'):
409 if k.startswith('color.'):
409 if k.startswith('color.'):
410 ui._styles[k] = k[6:]
410 ui._styles[k] = k[6:]
411 elif k.startswith('terminfo.'):
411 elif k.startswith('terminfo.'):
412 ui._styles[k] = k[9:]
412 ui._styles[k] = k[9:]
413 ui.write(_('available colors:\n'))
413 ui.write(_('available colors:\n'))
414 # sort label with a '_' after the other to group '_background' entry.
414 # sort label with a '_' after the other to group '_background' entry.
415 items = sorted(ui._styles.items(),
415 items = sorted(ui._styles.items(),
416 key=lambda i: ('_' in i[0], i[0], i[1]))
416 key=lambda i: ('_' in i[0], i[0], i[1]))
417 for colorname, label in items:
417 for colorname, label in items:
418 ui.write(('%s\n') % colorname, label=label)
418 ui.write(('%s\n') % colorname, label=label)
419
419
420 def _debugdisplaystyle(ui):
420 def _debugdisplaystyle(ui):
421 ui.write(_('available style:\n'))
421 ui.write(_('available style:\n'))
422 width = max(len(s) for s in ui._styles)
422 width = max(len(s) for s in ui._styles)
423 for label, effects in sorted(ui._styles.items()):
423 for label, effects in sorted(ui._styles.items()):
424 ui.write('%s' % label, label=label)
424 ui.write('%s' % label, label=label)
425 if effects:
425 if effects:
426 # 50
426 # 50
427 ui.write(': ')
427 ui.write(': ')
428 ui.write(' ' * (max(0, width - len(label))))
428 ui.write(' ' * (max(0, width - len(label))))
429 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
429 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
430 ui.write('\n')
430 ui.write('\n')
431
431
432 @command('debugcreatestreamclonebundle', [], 'FILE')
432 @command('debugcreatestreamclonebundle', [], 'FILE')
433 def debugcreatestreamclonebundle(ui, repo, fname):
433 def debugcreatestreamclonebundle(ui, repo, fname):
434 """create a stream clone bundle file
434 """create a stream clone bundle file
435
435
436 Stream bundles are special bundles that are essentially archives of
436 Stream bundles are special bundles that are essentially archives of
437 revlog files. They are commonly used for cloning very quickly.
437 revlog files. They are commonly used for cloning very quickly.
438 """
438 """
439 # TODO we may want to turn this into an abort when this functionality
439 # TODO we may want to turn this into an abort when this functionality
440 # is moved into `hg bundle`.
440 # is moved into `hg bundle`.
441 if phases.hassecret(repo):
441 if phases.hassecret(repo):
442 ui.warn(_('(warning: stream clone bundle will contain secret '
442 ui.warn(_('(warning: stream clone bundle will contain secret '
443 'revisions)\n'))
443 'revisions)\n'))
444
444
445 requirements, gen = streamclone.generatebundlev1(repo)
445 requirements, gen = streamclone.generatebundlev1(repo)
446 changegroup.writechunks(ui, gen, fname)
446 changegroup.writechunks(ui, gen, fname)
447
447
448 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
448 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
449
449
450 @command('debugdag',
450 @command('debugdag',
451 [('t', 'tags', None, _('use tags as labels')),
451 [('t', 'tags', None, _('use tags as labels')),
452 ('b', 'branches', None, _('annotate with branch names')),
452 ('b', 'branches', None, _('annotate with branch names')),
453 ('', 'dots', None, _('use dots for runs')),
453 ('', 'dots', None, _('use dots for runs')),
454 ('s', 'spaces', None, _('separate elements by spaces'))],
454 ('s', 'spaces', None, _('separate elements by spaces'))],
455 _('[OPTION]... [FILE [REV]...]'),
455 _('[OPTION]... [FILE [REV]...]'),
456 optionalrepo=True)
456 optionalrepo=True)
457 def debugdag(ui, repo, file_=None, *revs, **opts):
457 def debugdag(ui, repo, file_=None, *revs, **opts):
458 """format the changelog or an index DAG as a concise textual description
458 """format the changelog or an index DAG as a concise textual description
459
459
460 If you pass a revlog index, the revlog's DAG is emitted. If you list
460 If you pass a revlog index, the revlog's DAG is emitted. If you list
461 revision numbers, they get labeled in the output as rN.
461 revision numbers, they get labeled in the output as rN.
462
462
463 Otherwise, the changelog DAG of the current repo is emitted.
463 Otherwise, the changelog DAG of the current repo is emitted.
464 """
464 """
465 spaces = opts.get(r'spaces')
465 spaces = opts.get(r'spaces')
466 dots = opts.get(r'dots')
466 dots = opts.get(r'dots')
467 if file_:
467 if file_:
468 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
468 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
469 file_)
469 file_)
470 revs = set((int(r) for r in revs))
470 revs = set((int(r) for r in revs))
471 def events():
471 def events():
472 for r in rlog:
472 for r in rlog:
473 yield 'n', (r, list(p for p in rlog.parentrevs(r)
473 yield 'n', (r, list(p for p in rlog.parentrevs(r)
474 if p != -1))
474 if p != -1))
475 if r in revs:
475 if r in revs:
476 yield 'l', (r, "r%i" % r)
476 yield 'l', (r, "r%i" % r)
477 elif repo:
477 elif repo:
478 cl = repo.changelog
478 cl = repo.changelog
479 tags = opts.get(r'tags')
479 tags = opts.get(r'tags')
480 branches = opts.get(r'branches')
480 branches = opts.get(r'branches')
481 if tags:
481 if tags:
482 labels = {}
482 labels = {}
483 for l, n in repo.tags().items():
483 for l, n in repo.tags().items():
484 labels.setdefault(cl.rev(n), []).append(l)
484 labels.setdefault(cl.rev(n), []).append(l)
485 def events():
485 def events():
486 b = "default"
486 b = "default"
487 for r in cl:
487 for r in cl:
488 if branches:
488 if branches:
489 newb = cl.read(cl.node(r))[5]['branch']
489 newb = cl.read(cl.node(r))[5]['branch']
490 if newb != b:
490 if newb != b:
491 yield 'a', newb
491 yield 'a', newb
492 b = newb
492 b = newb
493 yield 'n', (r, list(p for p in cl.parentrevs(r)
493 yield 'n', (r, list(p for p in cl.parentrevs(r)
494 if p != -1))
494 if p != -1))
495 if tags:
495 if tags:
496 ls = labels.get(r)
496 ls = labels.get(r)
497 if ls:
497 if ls:
498 for l in ls:
498 for l in ls:
499 yield 'l', (r, l)
499 yield 'l', (r, l)
500 else:
500 else:
501 raise error.Abort(_('need repo for changelog dag'))
501 raise error.Abort(_('need repo for changelog dag'))
502
502
503 for line in dagparser.dagtextlines(events(),
503 for line in dagparser.dagtextlines(events(),
504 addspaces=spaces,
504 addspaces=spaces,
505 wraplabels=True,
505 wraplabels=True,
506 wrapannotations=True,
506 wrapannotations=True,
507 wrapnonlinear=dots,
507 wrapnonlinear=dots,
508 usedots=dots,
508 usedots=dots,
509 maxlinewidth=70):
509 maxlinewidth=70):
510 ui.write(line)
510 ui.write(line)
511 ui.write("\n")
511 ui.write("\n")
512
512
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # With -c/-m/--dir the revlog is implied, so the sole positional
        # argument is actually the revision, not a file.
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit stored data without applying flag processors
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
528
528
@command('debugdate',
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]'),
         norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # r"..." keys: **opts keys are native strings under Python 3
    if opts[r"extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
544
544
@command('debugdeltachain',
         cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # index entry layout: e[1]=compressed len, e[2]=uncompressed len,
        # e[3]=delta base rev, e[5]/e[6]=parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta the base is always self or the previous rev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
646
646
@command('debugdirstate|debugstate',
         [('', 'nodates', None, _('do not display the saved mtime')),
          ('', 'datesort', None, _('sort by saved mtime'))],
         _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # dirstate entry tuple: (state, mode, size, mtime)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            # symlink bit in the recorded mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
677
677
@command('debugdiscovery',
         [('', 'old', None, _('use old-style discovery')),
          ('', 'nonheads', None,
           _('use old-style discovery with non-heads included')),
         ] + cmdutil.remoteopts,
         _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round and report common/local/remote head sets.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery requests recorded in server log files
        # (semicolon-separated fields; field 1 is the operation name).
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
744
744
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
790
790
@command('debugfileset',
         [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
         _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        # show the parsed tree before evaluating it
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
803
803
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        # probe with a throw-away file; OSError (e.g. unwritable path)
        # leaves the answer as '(unknown)'
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
818
818
@command('debuggetbundle',
         [('H', 'head', [], _('id of head node'), _('ID')),
          ('C', 'common', [], _('id of common node'), _('ID')),
          ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
         _('REPO FILE [-H|-C ID]...'),
         norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
853
853
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not ignored directly; check each parent directory
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)
894
894
@command('debugindex', cmdutil.debugrevlogopts +
         [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
         _('[-f FORMAT] -c|-m|FILE'),
         optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # damaged index entries still get a line in the dump
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
951
951
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # one edge per real parent; a null second parent is omitted
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
966
966
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # Write `contents` to a fresh temp file and return its path.
        # NOTE(review): appears unused in this function body — presumably a
        # leftover from an older check; confirm before removing.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # Count of hard failures; warnings below deliberately do not bump this.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    # condwrite only emits when its first argument is truthy, so the error
    # detail is printed only on failure.
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS protocol names supported by this Python, plus 'sni' when the ssl
    # module supports Server Name Indication.
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    # The version string may carry a '+<extra>' suffix for local builds;
    # report the base version and the suffix separately.
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    # Only try importing the C extensions when the module policy says the C
    # implementations may be used.
    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    # `p` doubles as the "templates are OK" flag: it is cleared to None when
    # the default map file is missing or fails to compile.
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    # A missing 'vi' (the built-in fallback) is only warned about; a missing
    # explicitly-configured editor is counted as a problem below.
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        # Emit problems=0 as data even though no 'problems' line is printed.
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1134
1134
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open a peer (possibly remote) so we can exercise the wire-protocol
    # 'known' command against it.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # Render each boolean answer as '1' (known) or '0' (unknown), in the
    # same order the ids were given.
    digits = []
    for known in peer.known(nodes):
        digits.append("1" if known else "0")
    ui.write("%s\n" % "".join(digits))
1148
1148
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so old completion scripts keep working; all real
    # work is delegated to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1153
1153
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: the second operand used to re-test 'force_lock', so running
    # with only --force-wlock fell through to the reporting code below
    # instead of returning success after freeing the lock.
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Report the state of one lock file; return 1 if held, else 0."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else holds it; release immediately.
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # The lock vanished between the acquire attempt and lstat();
                # treat it as free. Any other error is a real failure.
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    # Exit status is the number of locks still held (0 == all free).
    return held
1225
1225
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the all-zero hash as the literal string 'null'.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Pretty-print the v1 or v2 record list (closes over v1records /
        # v2records, which are read below before this is first called).
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: driver name and its state, separated
                # by a NUL byte.
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file record; NUL-separated fields. v1 lacks the 'other
                # node' field, so flags sits one position earlier.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename, then alternating key/value
                # pairs, all NUL-separated.
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other, and optionally base.
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                # Unknown record type: dump it raw with NULs made visible.
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types (L, O, m, l) sort first, in that order;
        # everything else follows, ordered by record payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # Mismatch means v2 can't be trusted; fall back to v1, but show v2
        # too when --verbose was given.
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
    if ui.verbose:
        printrecords(2)
1324
1324
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every name from every namespace except 'branches'; branches are
    # handled separately below because historically only OPEN branches were
    # listed, and we preserve that behaviour.
    candidates = set()
    for kind, ns in repo.names.iteritems():
        if kind == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No arguments means "complete everything" (empty prefix).
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1344
1344
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id into binary; abort on anything
        # shorter, longer, or non-hex.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # Mode 1: --delete — remove markers by index and return early.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: a precursor was given — create a new marker.
    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Lock, then open a transaction; the nested try/finally blocks
        # guarantee the transaction is released before the lock is.
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # Parents can only be recorded for changesets we have
                    # locally (looked up in the unfiltered repo).
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    # Mode 3: no precursor — display markers (optionally filtered by --rev).
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # With --index, indices must be computed over ALL markers, so we
            # iterate the full list and filter display down to `markers`.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1460
1460
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for a single user-supplied path.
        # 'acceptable' is a string of dirstate status codes to include.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Path lies outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # Dirstate paths always use '/', so normalize OS separators before
        # matching, and convert back afterwards.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator so the
                # shell can complete one segment at a time.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the option flags;
    # with no flags, fall back to all of normal/merged/added/removed.
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1525
1525
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool wins over everything else; route it through the same
        # config override mechanism the real merge machinery uses.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # Suppress _picktool's own chatter unless --debug;
                    # we only want the FILE = TOOL summary line.
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1604
1604
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        # Shell convention: exit 0 on success, non-zero on failure.
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            # Escape values so binary keys/values stay printable.
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))
1625
1625
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions

    Prints both pvecs, their depths, and the delta/hamming distance
    plus the inferred relation (=, >, <, or |).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): 'rel' is unbound if none of the branches above hold;
    # presumably pvec comparisons are total so one always does — confirm.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1646
1646
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means rebuild everything.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate ...
            manifestonly = manifestfiles - dirstatefiles
            # ... plus dirstate-only files that are not marked as added.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1684
1684
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)
1689
1689
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() yields (source path, source filenode) or False.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
1707
1707
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump: one raw index row per revision, then exit.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means the revision is stored as a full snapshot.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Track current heads incrementally: parents stop being heads
            # as soon as a child appears.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode version/flags from the revlog header word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for delta-base classification.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # Each size accumulator is [min, max, total]; min starts as None so the
    # first sample always initializes it.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold 'size' into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot: starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # Delta revision: extend the base's chain and classify the
            # delta parent (previous rev, p1, p2, or other).
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Convert totals into averages in place ([min, max, avg] from here on).
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format sized to the widest expected value.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # "value (percent%)" format, optionally indented by 'padding'.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    full      : ') + fmt % pcfmt(numfull, numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    full      : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Label a chunk by its first byte; printable letters get shown
        # alongside their hex value.
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                                numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                                numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                                numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
1938
1938
1939 @command('debugrevspec',
1939 @command('debugrevspec',
1940 [('', 'optimize', None,
1940 [('', 'optimize', None,
1941 _('print parsed tree after optimizing (DEPRECATED)')),
1941 _('print parsed tree after optimizing (DEPRECATED)')),
1942 ('', 'show-revs', True, _('print list of result revisions (default)')),
1942 ('', 'show-revs', True, _('print list of result revisions (default)')),
1943 ('s', 'show-set', None, _('print internal representation of result set')),
1943 ('s', 'show-set', None, _('print internal representation of result set')),
1944 ('p', 'show-stage', [],
1944 ('p', 'show-stage', [],
1945 _('print parsed tree at the given stage'), _('NAME')),
1945 _('print parsed tree at the given stage'), _('NAME')),
1946 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1946 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1947 ('', 'verify-optimized', False, _('verify optimized result')),
1947 ('', 'verify-optimized', False, _('verify optimized result')),
1948 ],
1948 ],
1949 ('REVSPEC'))
1949 ('REVSPEC'))
1950 def debugrevspec(ui, repo, expr, **opts):
1950 def debugrevspec(ui, repo, expr, **opts):
1951 """parse and apply a revision specification
1951 """parse and apply a revision specification
1952
1952
1953 Use -p/--show-stage option to print the parsed tree at the given stages.
1953 Use -p/--show-stage option to print the parsed tree at the given stages.
1954 Use -p all to print tree at every stage.
1954 Use -p all to print tree at every stage.
1955
1955
1956 Use --no-show-revs option with -s or -p to print only the set
1956 Use --no-show-revs option with -s or -p to print only the set
1957 representation or the parsed tree respectively.
1957 representation or the parsed tree respectively.
1958
1958
1959 Use --verify-optimized to compare the optimized result with the unoptimized
1959 Use --verify-optimized to compare the optimized result with the unoptimized
1960 one. Returns 1 if the optimized result differs.
1960 one. Returns 1 if the optimized result differs.
1961 """
1961 """
1962 opts = pycompat.byteskwargs(opts)
1962 opts = pycompat.byteskwargs(opts)
1963 stages = [
1963 stages = [
1964 ('parsed', lambda tree: tree),
1964 ('parsed', lambda tree: tree),
1965 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1965 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1966 ('concatenated', revsetlang.foldconcat),
1966 ('concatenated', revsetlang.foldconcat),
1967 ('analyzed', revsetlang.analyze),
1967 ('analyzed', revsetlang.analyze),
1968 ('optimized', revsetlang.optimize),
1968 ('optimized', revsetlang.optimize),
1969 ]
1969 ]
1970 if opts['no_optimized']:
1970 if opts['no_optimized']:
1971 stages = stages[:-1]
1971 stages = stages[:-1]
1972 if opts['verify_optimized'] and opts['no_optimized']:
1972 if opts['verify_optimized'] and opts['no_optimized']:
1973 raise error.Abort(_('cannot use --verify-optimized with '
1973 raise error.Abort(_('cannot use --verify-optimized with '
1974 '--no-optimized'))
1974 '--no-optimized'))
1975 stagenames = set(n for n, f in stages)
1975 stagenames = set(n for n, f in stages)
1976
1976
1977 showalways = set()
1977 showalways = set()
1978 showchanged = set()
1978 showchanged = set()
1979 if ui.verbose and not opts['show_stage']:
1979 if ui.verbose and not opts['show_stage']:
1980 # show parsed tree by --verbose (deprecated)
1980 # show parsed tree by --verbose (deprecated)
1981 showalways.add('parsed')
1981 showalways.add('parsed')
1982 showchanged.update(['expanded', 'concatenated'])
1982 showchanged.update(['expanded', 'concatenated'])
1983 if opts['optimize']:
1983 if opts['optimize']:
1984 showalways.add('optimized')
1984 showalways.add('optimized')
1985 if opts['show_stage'] and opts['optimize']:
1985 if opts['show_stage'] and opts['optimize']:
1986 raise error.Abort(_('cannot use --optimize with --show-stage'))
1986 raise error.Abort(_('cannot use --optimize with --show-stage'))
1987 if opts['show_stage'] == ['all']:
1987 if opts['show_stage'] == ['all']:
1988 showalways.update(stagenames)
1988 showalways.update(stagenames)
1989 else:
1989 else:
1990 for n in opts['show_stage']:
1990 for n in opts['show_stage']:
1991 if n not in stagenames:
1991 if n not in stagenames:
1992 raise error.Abort(_('invalid stage name: %s') % n)
1992 raise error.Abort(_('invalid stage name: %s') % n)
1993 showalways.update(opts['show_stage'])
1993 showalways.update(opts['show_stage'])
1994
1994
1995 treebystage = {}
1995 treebystage = {}
1996 printedtree = None
1996 printedtree = None
1997 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1997 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1998 for n, f in stages:
1998 for n, f in stages:
1999 treebystage[n] = tree = f(tree)
1999 treebystage[n] = tree = f(tree)
2000 if n in showalways or (n in showchanged and tree != printedtree):
2000 if n in showalways or (n in showchanged and tree != printedtree):
2001 if opts['show_stage'] or n != 'parsed':
2001 if opts['show_stage'] or n != 'parsed':
2002 ui.write(("* %s:\n") % n)
2002 ui.write(("* %s:\n") % n)
2003 ui.write(revsetlang.prettyformat(tree), "\n")
2003 ui.write(revsetlang.prettyformat(tree), "\n")
2004 printedtree = tree
2004 printedtree = tree
2005
2005
2006 if opts['verify_optimized']:
2006 if opts['verify_optimized']:
2007 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2007 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2008 brevs = revset.makematcher(treebystage['optimized'])(repo)
2008 brevs = revset.makematcher(treebystage['optimized'])(repo)
2009 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2009 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2010 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2010 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2011 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2011 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2012 arevs = list(arevs)
2012 arevs = list(arevs)
2013 brevs = list(brevs)
2013 brevs = list(brevs)
2014 if arevs == brevs:
2014 if arevs == brevs:
2015 return 0
2015 return 0
2016 ui.write(('--- analyzed\n'), label='diff.file_a')
2016 ui.write(('--- analyzed\n'), label='diff.file_a')
2017 ui.write(('+++ optimized\n'), label='diff.file_b')
2017 ui.write(('+++ optimized\n'), label='diff.file_b')
2018 sm = difflib.SequenceMatcher(None, arevs, brevs)
2018 sm = difflib.SequenceMatcher(None, arevs, brevs)
2019 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2019 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2020 if tag in ('delete', 'replace'):
2020 if tag in ('delete', 'replace'):
2021 for c in arevs[alo:ahi]:
2021 for c in arevs[alo:ahi]:
2022 ui.write('-%s\n' % c, label='diff.deleted')
2022 ui.write('-%s\n' % c, label='diff.deleted')
2023 if tag in ('insert', 'replace'):
2023 if tag in ('insert', 'replace'):
2024 for c in brevs[blo:bhi]:
2024 for c in brevs[blo:bhi]:
2025 ui.write('+%s\n' % c, label='diff.inserted')
2025 ui.write('+%s\n' % c, label='diff.inserted')
2026 if tag == 'equal':
2026 if tag == 'equal':
2027 for c in arevs[alo:ahi]:
2027 for c in arevs[alo:ahi]:
2028 ui.write(' %s\n' % c)
2028 ui.write(' %s\n' % c)
2029 return 1
2029 return 1
2030
2030
2031 func = revset.makematcher(tree)
2031 func = revset.makematcher(tree)
2032 revs = func(repo)
2032 revs = func(repo)
2033 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2033 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2034 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2034 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2035 if not opts['show_revs']:
2035 if not opts['show_revs']:
2036 return
2036 return
2037 for c in revs:
2037 for c in revs:
2038 ui.write("%s\n" % c)
2038 ui.write("%s\n" % c)
2039
2039
2040 @command('debugsetparents', [], _('REV1 [REV2]'))
2040 @command('debugsetparents', [], _('REV1 [REV2]'))
2041 def debugsetparents(ui, repo, rev1, rev2=None):
2041 def debugsetparents(ui, repo, rev1, rev2=None):
2042 """manually set the parents of the current working directory
2042 """manually set the parents of the current working directory
2043
2043
2044 This is useful for writing repository conversion tools, but should
2044 This is useful for writing repository conversion tools, but should
2045 be used with care. For example, neither the working directory nor the
2045 be used with care. For example, neither the working directory nor the
2046 dirstate is updated, so file status may be incorrect after running this
2046 dirstate is updated, so file status may be incorrect after running this
2047 command.
2047 command.
2048
2048
2049 Returns 0 on success.
2049 Returns 0 on success.
2050 """
2050 """
2051
2051
2052 r1 = scmutil.revsingle(repo, rev1).node()
2052 r1 = scmutil.revsingle(repo, rev1).node()
2053 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2053 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2054
2054
2055 with repo.wlock():
2055 with repo.wlock():
2056 repo.setparents(r1, r2)
2056 repo.setparents(r1, r2)
2057
2057
2058 @command('debugsub',
2058 @command('debugsub',
2059 [('r', 'rev', '',
2059 [('r', 'rev', '',
2060 _('revision to check'), _('REV'))],
2060 _('revision to check'), _('REV'))],
2061 _('[-r REV] [REV]'))
2061 _('[-r REV] [REV]'))
2062 def debugsub(ui, repo, rev=None):
2062 def debugsub(ui, repo, rev=None):
2063 ctx = scmutil.revsingle(repo, rev, None)
2063 ctx = scmutil.revsingle(repo, rev, None)
2064 for k, v in sorted(ctx.substate.items()):
2064 for k, v in sorted(ctx.substate.items()):
2065 ui.write(('path %s\n') % k)
2065 ui.write(('path %s\n') % k)
2066 ui.write((' source %s\n') % v[0])
2066 ui.write((' source %s\n') % v[0])
2067 ui.write((' revision %s\n') % v[1])
2067 ui.write((' revision %s\n') % v[1])
2068
2068
2069 @command('debugsuccessorssets',
2069 @command('debugsuccessorssets',
2070 [],
2070 [('', 'closest', False, _('return closest successors sets only'))],
2071 _('[REV]'))
2071 _('[REV]'))
2072 def debugsuccessorssets(ui, repo, *revs):
2072 def debugsuccessorssets(ui, repo, *revs, **opts):
2073 """show set of successors for revision
2073 """show set of successors for revision
2074
2074
2075 A successors set of changeset A is a consistent group of revisions that
2075 A successors set of changeset A is a consistent group of revisions that
2076 succeed A. It contains non-obsolete changesets only.
2076 succeed A. It contains non-obsolete changesets only unless closests
2077 successors set is set.
2077
2078
2078 In most cases a changeset A has a single successors set containing a single
2079 In most cases a changeset A has a single successors set containing a single
2079 successor (changeset A replaced by A').
2080 successor (changeset A replaced by A').
2080
2081
2081 A changeset that is made obsolete with no successors are called "pruned".
2082 A changeset that is made obsolete with no successors are called "pruned".
2082 Such changesets have no successors sets at all.
2083 Such changesets have no successors sets at all.
2083
2084
2084 A changeset that has been "split" will have a successors set containing
2085 A changeset that has been "split" will have a successors set containing
2085 more than one successor.
2086 more than one successor.
2086
2087
2087 A changeset that has been rewritten in multiple different ways is called
2088 A changeset that has been rewritten in multiple different ways is called
2088 "divergent". Such changesets have multiple successor sets (each of which
2089 "divergent". Such changesets have multiple successor sets (each of which
2089 may also be split, i.e. have multiple successors).
2090 may also be split, i.e. have multiple successors).
2090
2091
2091 Results are displayed as follows::
2092 Results are displayed as follows::
2092
2093
2093 <rev1>
2094 <rev1>
2094 <successors-1A>
2095 <successors-1A>
2095 <rev2>
2096 <rev2>
2096 <successors-2A>
2097 <successors-2A>
2097 <successors-2B1> <successors-2B2> <successors-2B3>
2098 <successors-2B1> <successors-2B2> <successors-2B3>
2098
2099
2099 Here rev2 has two possible (i.e. divergent) successors sets. The first
2100 Here rev2 has two possible (i.e. divergent) successors sets. The first
2100 holds one element, whereas the second holds three (i.e. the changeset has
2101 holds one element, whereas the second holds three (i.e. the changeset has
2101 been split).
2102 been split).
2102 """
2103 """
2103 # passed to successorssets caching computation from one call to another
2104 # passed to successorssets caching computation from one call to another
2104 cache = {}
2105 cache = {}
2105 ctx2str = str
2106 ctx2str = str
2106 node2str = short
2107 node2str = short
2107 if ui.debug():
2108 if ui.debug():
2108 def ctx2str(ctx):
2109 def ctx2str(ctx):
2109 return ctx.hex()
2110 return ctx.hex()
2110 node2str = hex
2111 node2str = hex
2111 for rev in scmutil.revrange(repo, revs):
2112 for rev in scmutil.revrange(repo, revs):
2112 ctx = repo[rev]
2113 ctx = repo[rev]
2113 ui.write('%s\n'% ctx2str(ctx))
2114 ui.write('%s\n'% ctx2str(ctx))
2114 for succsset in obsutil.successorssets(repo, ctx.node(), cache=cache):
2115 for succsset in obsutil.successorssets(repo, ctx.node(),
2116 closest=opts['closest'],
2117 cache=cache):
2115 if succsset:
2118 if succsset:
2116 ui.write(' ')
2119 ui.write(' ')
2117 ui.write(node2str(succsset[0]))
2120 ui.write(node2str(succsset[0]))
2118 for node in succsset[1:]:
2121 for node in succsset[1:]:
2119 ui.write(' ')
2122 ui.write(' ')
2120 ui.write(node2str(node))
2123 ui.write(node2str(node))
2121 ui.write('\n')
2124 ui.write('\n')
2122
2125
2123 @command('debugtemplate',
2126 @command('debugtemplate',
2124 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2127 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2125 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2128 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2126 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2129 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2127 optionalrepo=True)
2130 optionalrepo=True)
2128 def debugtemplate(ui, repo, tmpl, **opts):
2131 def debugtemplate(ui, repo, tmpl, **opts):
2129 """parse and apply a template
2132 """parse and apply a template
2130
2133
2131 If -r/--rev is given, the template is processed as a log template and
2134 If -r/--rev is given, the template is processed as a log template and
2132 applied to the given changesets. Otherwise, it is processed as a generic
2135 applied to the given changesets. Otherwise, it is processed as a generic
2133 template.
2136 template.
2134
2137
2135 Use --verbose to print the parsed tree.
2138 Use --verbose to print the parsed tree.
2136 """
2139 """
2137 revs = None
2140 revs = None
2138 if opts[r'rev']:
2141 if opts[r'rev']:
2139 if repo is None:
2142 if repo is None:
2140 raise error.RepoError(_('there is no Mercurial repository here '
2143 raise error.RepoError(_('there is no Mercurial repository here '
2141 '(.hg not found)'))
2144 '(.hg not found)'))
2142 revs = scmutil.revrange(repo, opts[r'rev'])
2145 revs = scmutil.revrange(repo, opts[r'rev'])
2143
2146
2144 props = {}
2147 props = {}
2145 for d in opts[r'define']:
2148 for d in opts[r'define']:
2146 try:
2149 try:
2147 k, v = (e.strip() for e in d.split('=', 1))
2150 k, v = (e.strip() for e in d.split('=', 1))
2148 if not k or k == 'ui':
2151 if not k or k == 'ui':
2149 raise ValueError
2152 raise ValueError
2150 props[k] = v
2153 props[k] = v
2151 except ValueError:
2154 except ValueError:
2152 raise error.Abort(_('malformed keyword definition: %s') % d)
2155 raise error.Abort(_('malformed keyword definition: %s') % d)
2153
2156
2154 if ui.verbose:
2157 if ui.verbose:
2155 aliases = ui.configitems('templatealias')
2158 aliases = ui.configitems('templatealias')
2156 tree = templater.parse(tmpl)
2159 tree = templater.parse(tmpl)
2157 ui.note(templater.prettyformat(tree), '\n')
2160 ui.note(templater.prettyformat(tree), '\n')
2158 newtree = templater.expandaliases(tree, aliases)
2161 newtree = templater.expandaliases(tree, aliases)
2159 if newtree != tree:
2162 if newtree != tree:
2160 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2163 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2161
2164
2162 if revs is None:
2165 if revs is None:
2163 t = formatter.maketemplater(ui, tmpl)
2166 t = formatter.maketemplater(ui, tmpl)
2164 props['ui'] = ui
2167 props['ui'] = ui
2165 ui.write(t.render(props))
2168 ui.write(t.render(props))
2166 else:
2169 else:
2167 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2170 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2168 for r in revs:
2171 for r in revs:
2169 displayer.show(repo[r], **pycompat.strkwargs(props))
2172 displayer.show(repo[r], **pycompat.strkwargs(props))
2170 displayer.close()
2173 displayer.close()
2171
2174
2172 @command('debugupdatecaches', [])
2175 @command('debugupdatecaches', [])
2173 def debugupdatecaches(ui, repo, *pats, **opts):
2176 def debugupdatecaches(ui, repo, *pats, **opts):
2174 """warm all known caches in the repository"""
2177 """warm all known caches in the repository"""
2175 with repo.wlock():
2178 with repo.wlock():
2176 with repo.lock():
2179 with repo.lock():
2177 repo.updatecaches()
2180 repo.updatecaches()
2178
2181
2179 @command('debugupgraderepo', [
2182 @command('debugupgraderepo', [
2180 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2183 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2181 ('', 'run', False, _('performs an upgrade')),
2184 ('', 'run', False, _('performs an upgrade')),
2182 ])
2185 ])
2183 def debugupgraderepo(ui, repo, run=False, optimize=None):
2186 def debugupgraderepo(ui, repo, run=False, optimize=None):
2184 """upgrade a repository to use different features
2187 """upgrade a repository to use different features
2185
2188
2186 If no arguments are specified, the repository is evaluated for upgrade
2189 If no arguments are specified, the repository is evaluated for upgrade
2187 and a list of problems and potential optimizations is printed.
2190 and a list of problems and potential optimizations is printed.
2188
2191
2189 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2192 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2190 can be influenced via additional arguments. More details will be provided
2193 can be influenced via additional arguments. More details will be provided
2191 by the command output when run without ``--run``.
2194 by the command output when run without ``--run``.
2192
2195
2193 During the upgrade, the repository will be locked and no writes will be
2196 During the upgrade, the repository will be locked and no writes will be
2194 allowed.
2197 allowed.
2195
2198
2196 At the end of the upgrade, the repository may not be readable while new
2199 At the end of the upgrade, the repository may not be readable while new
2197 repository data is swapped in. This window will be as long as it takes to
2200 repository data is swapped in. This window will be as long as it takes to
2198 rename some directories inside the ``.hg`` directory. On most machines, this
2201 rename some directories inside the ``.hg`` directory. On most machines, this
2199 should complete almost instantaneously and the chances of a consumer being
2202 should complete almost instantaneously and the chances of a consumer being
2200 unable to access the repository should be low.
2203 unable to access the repository should be low.
2201 """
2204 """
2202 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2205 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2203
2206
2204 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2207 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2205 inferrepo=True)
2208 inferrepo=True)
2206 def debugwalk(ui, repo, *pats, **opts):
2209 def debugwalk(ui, repo, *pats, **opts):
2207 """show how files match on given patterns"""
2210 """show how files match on given patterns"""
2208 opts = pycompat.byteskwargs(opts)
2211 opts = pycompat.byteskwargs(opts)
2209 m = scmutil.match(repo[None], pats, opts)
2212 m = scmutil.match(repo[None], pats, opts)
2210 ui.write(('matcher: %r\n' % m))
2213 ui.write(('matcher: %r\n' % m))
2211 items = list(repo[None].walk(m))
2214 items = list(repo[None].walk(m))
2212 if not items:
2215 if not items:
2213 return
2216 return
2214 f = lambda fn: fn
2217 f = lambda fn: fn
2215 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2218 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2216 f = lambda fn: util.normpath(fn)
2219 f = lambda fn: util.normpath(fn)
2217 fmt = 'f %%-%ds %%-%ds %%s' % (
2220 fmt = 'f %%-%ds %%-%ds %%s' % (
2218 max([len(abs) for abs in items]),
2221 max([len(abs) for abs in items]),
2219 max([len(m.rel(abs)) for abs in items]))
2222 max([len(m.rel(abs)) for abs in items]))
2220 for abs in items:
2223 for abs in items:
2221 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2224 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2222 ui.write("%s\n" % line.rstrip())
2225 ui.write("%s\n" % line.rstrip())
2223
2226
2224 @command('debugwireargs',
2227 @command('debugwireargs',
2225 [('', 'three', '', 'three'),
2228 [('', 'three', '', 'three'),
2226 ('', 'four', '', 'four'),
2229 ('', 'four', '', 'four'),
2227 ('', 'five', '', 'five'),
2230 ('', 'five', '', 'five'),
2228 ] + cmdutil.remoteopts,
2231 ] + cmdutil.remoteopts,
2229 _('REPO [OPTIONS]... [ONE [TWO]]'),
2232 _('REPO [OPTIONS]... [ONE [TWO]]'),
2230 norepo=True)
2233 norepo=True)
2231 def debugwireargs(ui, repopath, *vals, **opts):
2234 def debugwireargs(ui, repopath, *vals, **opts):
2232 opts = pycompat.byteskwargs(opts)
2235 opts = pycompat.byteskwargs(opts)
2233 repo = hg.peer(ui, opts, repopath)
2236 repo = hg.peer(ui, opts, repopath)
2234 for opt in cmdutil.remoteopts:
2237 for opt in cmdutil.remoteopts:
2235 del opts[opt[1]]
2238 del opts[opt[1]]
2236 args = {}
2239 args = {}
2237 for k, v in opts.iteritems():
2240 for k, v in opts.iteritems():
2238 if v:
2241 if v:
2239 args[k] = v
2242 args[k] = v
2240 # run twice to check that we don't mess up the stream for the next command
2243 # run twice to check that we don't mess up the stream for the next command
2241 res1 = repo.debugwireargs(*vals, **args)
2244 res1 = repo.debugwireargs(*vals, **args)
2242 res2 = repo.debugwireargs(*vals, **args)
2245 res2 = repo.debugwireargs(*vals, **args)
2243 ui.write("%s\n" % res1)
2246 ui.write("%s\n" % res1)
2244 if res1 != res2:
2247 if res1 != res2:
2245 ui.warn("%s\n" % res2)
2248 ui.warn("%s\n" % res2)
@@ -1,518 +1,537
1 # obsutil.py - utility functions for obsolescence
1 # obsutil.py - utility functions for obsolescence
2 #
2 #
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from . import (
10 from . import (
11 phases,
11 phases,
12 )
12 )
13
13
14 class marker(object):
14 class marker(object):
15 """Wrap obsolete marker raw data"""
15 """Wrap obsolete marker raw data"""
16
16
17 def __init__(self, repo, data):
17 def __init__(self, repo, data):
18 # the repo argument will be used to create changectx in later version
18 # the repo argument will be used to create changectx in later version
19 self._repo = repo
19 self._repo = repo
20 self._data = data
20 self._data = data
21 self._decodedmeta = None
21 self._decodedmeta = None
22
22
23 def __hash__(self):
23 def __hash__(self):
24 return hash(self._data)
24 return hash(self._data)
25
25
26 def __eq__(self, other):
26 def __eq__(self, other):
27 if type(other) != type(self):
27 if type(other) != type(self):
28 return False
28 return False
29 return self._data == other._data
29 return self._data == other._data
30
30
31 def precnode(self):
31 def precnode(self):
32 """Precursor changeset node identifier"""
32 """Precursor changeset node identifier"""
33 return self._data[0]
33 return self._data[0]
34
34
35 def succnodes(self):
35 def succnodes(self):
36 """List of successor changesets node identifiers"""
36 """List of successor changesets node identifiers"""
37 return self._data[1]
37 return self._data[1]
38
38
39 def parentnodes(self):
39 def parentnodes(self):
40 """Parents of the precursors (None if not recorded)"""
40 """Parents of the precursors (None if not recorded)"""
41 return self._data[5]
41 return self._data[5]
42
42
43 def metadata(self):
43 def metadata(self):
44 """Decoded metadata dictionary"""
44 """Decoded metadata dictionary"""
45 return dict(self._data[3])
45 return dict(self._data[3])
46
46
47 def date(self):
47 def date(self):
48 """Creation date as (unixtime, offset)"""
48 """Creation date as (unixtime, offset)"""
49 return self._data[4]
49 return self._data[4]
50
50
51 def flags(self):
51 def flags(self):
52 """The flags field of the marker"""
52 """The flags field of the marker"""
53 return self._data[2]
53 return self._data[2]
54
54
55 def getmarkers(repo, nodes=None, exclusive=False):
55 def getmarkers(repo, nodes=None, exclusive=False):
56 """returns markers known in a repository
56 """returns markers known in a repository
57
57
58 If <nodes> is specified, only markers "relevant" to those nodes are are
58 If <nodes> is specified, only markers "relevant" to those nodes are are
59 returned"""
59 returned"""
60 if nodes is None:
60 if nodes is None:
61 rawmarkers = repo.obsstore
61 rawmarkers = repo.obsstore
62 elif exclusive:
62 elif exclusive:
63 rawmarkers = exclusivemarkers(repo, nodes)
63 rawmarkers = exclusivemarkers(repo, nodes)
64 else:
64 else:
65 rawmarkers = repo.obsstore.relevantmarkers(nodes)
65 rawmarkers = repo.obsstore.relevantmarkers(nodes)
66
66
67 for markerdata in rawmarkers:
67 for markerdata in rawmarkers:
68 yield marker(repo, markerdata)
68 yield marker(repo, markerdata)
69
69
70 def closestpredecessors(repo, nodeid):
70 def closestpredecessors(repo, nodeid):
71 """yield the list of next predecessors pointing on visible changectx nodes
71 """yield the list of next predecessors pointing on visible changectx nodes
72
72
73 This function respect the repoview filtering, filtered revision will be
73 This function respect the repoview filtering, filtered revision will be
74 considered missing.
74 considered missing.
75 """
75 """
76
76
77 precursors = repo.obsstore.precursors
77 precursors = repo.obsstore.precursors
78 stack = [nodeid]
78 stack = [nodeid]
79 seen = set(stack)
79 seen = set(stack)
80
80
81 while stack:
81 while stack:
82 current = stack.pop()
82 current = stack.pop()
83 currentpreccs = precursors.get(current, ())
83 currentpreccs = precursors.get(current, ())
84
84
85 for prec in currentpreccs:
85 for prec in currentpreccs:
86 precnodeid = prec[0]
86 precnodeid = prec[0]
87
87
88 # Basic cycle protection
88 # Basic cycle protection
89 if precnodeid in seen:
89 if precnodeid in seen:
90 continue
90 continue
91 seen.add(precnodeid)
91 seen.add(precnodeid)
92
92
93 if precnodeid in repo:
93 if precnodeid in repo:
94 yield precnodeid
94 yield precnodeid
95 else:
95 else:
96 stack.append(precnodeid)
96 stack.append(precnodeid)
97
97
98 def allprecursors(obsstore, nodes, ignoreflags=0):
98 def allprecursors(obsstore, nodes, ignoreflags=0):
99 """Yield node for every precursors of <nodes>.
99 """Yield node for every precursors of <nodes>.
100
100
101 Some precursors may be unknown locally.
101 Some precursors may be unknown locally.
102
102
103 This is a linear yield unsuited to detecting folded changesets. It includes
103 This is a linear yield unsuited to detecting folded changesets. It includes
104 initial nodes too."""
104 initial nodes too."""
105
105
106 remaining = set(nodes)
106 remaining = set(nodes)
107 seen = set(remaining)
107 seen = set(remaining)
108 while remaining:
108 while remaining:
109 current = remaining.pop()
109 current = remaining.pop()
110 yield current
110 yield current
111 for mark in obsstore.precursors.get(current, ()):
111 for mark in obsstore.precursors.get(current, ()):
112 # ignore marker flagged with specified flag
112 # ignore marker flagged with specified flag
113 if mark[2] & ignoreflags:
113 if mark[2] & ignoreflags:
114 continue
114 continue
115 suc = mark[0]
115 suc = mark[0]
116 if suc not in seen:
116 if suc not in seen:
117 seen.add(suc)
117 seen.add(suc)
118 remaining.add(suc)
118 remaining.add(suc)
119
119
120 def allsuccessors(obsstore, nodes, ignoreflags=0):
120 def allsuccessors(obsstore, nodes, ignoreflags=0):
121 """Yield node for every successor of <nodes>.
121 """Yield node for every successor of <nodes>.
122
122
123 Some successors may be unknown locally.
123 Some successors may be unknown locally.
124
124
125 This is a linear yield unsuited to detecting split changesets. It includes
125 This is a linear yield unsuited to detecting split changesets. It includes
126 initial nodes too."""
126 initial nodes too."""
127 remaining = set(nodes)
127 remaining = set(nodes)
128 seen = set(remaining)
128 seen = set(remaining)
129 while remaining:
129 while remaining:
130 current = remaining.pop()
130 current = remaining.pop()
131 yield current
131 yield current
132 for mark in obsstore.successors.get(current, ()):
132 for mark in obsstore.successors.get(current, ()):
133 # ignore marker flagged with specified flag
133 # ignore marker flagged with specified flag
134 if mark[2] & ignoreflags:
134 if mark[2] & ignoreflags:
135 continue
135 continue
136 for suc in mark[1]:
136 for suc in mark[1]:
137 if suc not in seen:
137 if suc not in seen:
138 seen.add(suc)
138 seen.add(suc)
139 remaining.add(suc)
139 remaining.add(suc)
140
140
141 def _filterprunes(markers):
141 def _filterprunes(markers):
142 """return a set with no prune markers"""
142 """return a set with no prune markers"""
143 return set(m for m in markers if m[1])
143 return set(m for m in markers if m[1])
144
144
145 def exclusivemarkers(repo, nodes):
145 def exclusivemarkers(repo, nodes):
146 """set of markers relevant to "nodes" but no other locally-known nodes
146 """set of markers relevant to "nodes" but no other locally-known nodes
147
147
148 This function compute the set of markers "exclusive" to a locally-known
148 This function compute the set of markers "exclusive" to a locally-known
149 node. This means we walk the markers starting from <nodes> until we reach a
149 node. This means we walk the markers starting from <nodes> until we reach a
150 locally-known precursors outside of <nodes>. Element of <nodes> with
150 locally-known precursors outside of <nodes>. Element of <nodes> with
151 locally-known successors outside of <nodes> are ignored (since their
151 locally-known successors outside of <nodes> are ignored (since their
152 precursors markers are also relevant to these successors).
152 precursors markers are also relevant to these successors).
153
153
154 For example:
154 For example:
155
155
156 # (A0 rewritten as A1)
156 # (A0 rewritten as A1)
157 #
157 #
158 # A0 <-1- A1 # Marker "1" is exclusive to A1
158 # A0 <-1- A1 # Marker "1" is exclusive to A1
159
159
160 or
160 or
161
161
162 # (A0 rewritten as AX; AX rewritten as A1; AX is unkown locally)
162 # (A0 rewritten as AX; AX rewritten as A1; AX is unkown locally)
163 #
163 #
164 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
164 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
165
165
166 or
166 or
167
167
168 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
168 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
169 #
169 #
170 # <-2- A1 # Marker "2" is exclusive to A0,A1
170 # <-2- A1 # Marker "2" is exclusive to A0,A1
171 # /
171 # /
172 # <-1- A0
172 # <-1- A0
173 # \
173 # \
174 # <-3- A2 # Marker "3" is exclusive to A0,A2
174 # <-3- A2 # Marker "3" is exclusive to A0,A2
175 #
175 #
176 # in addition:
176 # in addition:
177 #
177 #
178 # Markers "2,3" are exclusive to A1,A2
178 # Markers "2,3" are exclusive to A1,A2
179 # Markers "1,2,3" are exclusive to A0,A1,A2
179 # Markers "1,2,3" are exclusive to A0,A1,A2
180
180
181 See test/test-obsolete-bundle-strip.t for more examples.
181 See test/test-obsolete-bundle-strip.t for more examples.
182
182
183 An example usage is strip. When stripping a changeset, we also want to
183 An example usage is strip. When stripping a changeset, we also want to
184 strip the markers exclusive to this changeset. Otherwise we would have
184 strip the markers exclusive to this changeset. Otherwise we would have
185 "dangling"" obsolescence markers from its precursors: Obsolescence markers
185 "dangling"" obsolescence markers from its precursors: Obsolescence markers
186 marking a node as obsolete without any successors available locally.
186 marking a node as obsolete without any successors available locally.
187
187
188 As for relevant markers, the prune markers for children will be followed.
188 As for relevant markers, the prune markers for children will be followed.
189 Of course, they will only be followed if the pruned children is
189 Of course, they will only be followed if the pruned children is
190 locally-known. Since the prune markers are relevant to the pruned node.
190 locally-known. Since the prune markers are relevant to the pruned node.
191 However, while prune markers are considered relevant to the parent of the
191 However, while prune markers are considered relevant to the parent of the
192 pruned changesets, prune markers for locally-known changeset (with no
192 pruned changesets, prune markers for locally-known changeset (with no
193 successors) are considered exclusive to the pruned nodes. This allows
193 successors) are considered exclusive to the pruned nodes. This allows
194 to strip the prune markers (with the rest of the exclusive chain) alongside
194 to strip the prune markers (with the rest of the exclusive chain) alongside
195 the pruned changesets.
195 the pruned changesets.
196 """
196 """
197 # running on a filtered repository would be dangerous as markers could be
197 # running on a filtered repository would be dangerous as markers could be
198 # reported as exclusive when they are relevant for other filtered nodes.
198 # reported as exclusive when they are relevant for other filtered nodes.
199 unfi = repo.unfiltered()
199 unfi = repo.unfiltered()
200
200
201 # shortcut to various useful item
201 # shortcut to various useful item
202 nm = unfi.changelog.nodemap
202 nm = unfi.changelog.nodemap
203 precursorsmarkers = unfi.obsstore.precursors
203 precursorsmarkers = unfi.obsstore.precursors
204 successormarkers = unfi.obsstore.successors
204 successormarkers = unfi.obsstore.successors
205 childrenmarkers = unfi.obsstore.children
205 childrenmarkers = unfi.obsstore.children
206
206
207 # exclusive markers (return of the function)
207 # exclusive markers (return of the function)
208 exclmarkers = set()
208 exclmarkers = set()
209 # we need fast membership testing
209 # we need fast membership testing
210 nodes = set(nodes)
210 nodes = set(nodes)
211 # looking for head in the obshistory
211 # looking for head in the obshistory
212 #
212 #
213 # XXX we are ignoring all issues in regard with cycle for now.
213 # XXX we are ignoring all issues in regard with cycle for now.
214 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
214 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
215 stack.sort()
215 stack.sort()
216 # nodes already stacked
216 # nodes already stacked
217 seennodes = set(stack)
217 seennodes = set(stack)
218 while stack:
218 while stack:
219 current = stack.pop()
219 current = stack.pop()
220 # fetch precursors markers
220 # fetch precursors markers
221 markers = list(precursorsmarkers.get(current, ()))
221 markers = list(precursorsmarkers.get(current, ()))
222 # extend the list with prune markers
222 # extend the list with prune markers
223 for mark in successormarkers.get(current, ()):
223 for mark in successormarkers.get(current, ()):
224 if not mark[1]:
224 if not mark[1]:
225 markers.append(mark)
225 markers.append(mark)
226 # and markers from children (looking for prune)
226 # and markers from children (looking for prune)
227 for mark in childrenmarkers.get(current, ()):
227 for mark in childrenmarkers.get(current, ()):
228 if not mark[1]:
228 if not mark[1]:
229 markers.append(mark)
229 markers.append(mark)
230 # traverse the markers
230 # traverse the markers
231 for mark in markers:
231 for mark in markers:
232 if mark in exclmarkers:
232 if mark in exclmarkers:
233 # markers already selected
233 # markers already selected
234 continue
234 continue
235
235
236 # If the markers is about the current node, select it
236 # If the markers is about the current node, select it
237 #
237 #
238 # (this delay the addition of markers from children)
238 # (this delay the addition of markers from children)
239 if mark[1] or mark[0] == current:
239 if mark[1] or mark[0] == current:
240 exclmarkers.add(mark)
240 exclmarkers.add(mark)
241
241
242 # should we keep traversing through the precursors?
242 # should we keep traversing through the precursors?
243 prec = mark[0]
243 prec = mark[0]
244
244
245 # nodes in the stack or already processed
245 # nodes in the stack or already processed
246 if prec in seennodes:
246 if prec in seennodes:
247 continue
247 continue
248
248
249 # is this a locally known node ?
249 # is this a locally known node ?
250 known = prec in nm
250 known = prec in nm
251 # if locally-known and not in the <nodes> set the traversal
251 # if locally-known and not in the <nodes> set the traversal
252 # stop here.
252 # stop here.
253 if known and prec not in nodes:
253 if known and prec not in nodes:
254 continue
254 continue
255
255
256 # do not keep going if there are unselected markers pointing to this
256 # do not keep going if there are unselected markers pointing to this
257 # nodes. If we end up traversing these unselected markers later the
257 # nodes. If we end up traversing these unselected markers later the
258 # node will be taken care of at that point.
258 # node will be taken care of at that point.
259 precmarkers = _filterprunes(successormarkers.get(prec))
259 precmarkers = _filterprunes(successormarkers.get(prec))
260 if precmarkers.issubset(exclmarkers):
260 if precmarkers.issubset(exclmarkers):
261 seennodes.add(prec)
261 seennodes.add(prec)
262 stack.append(prec)
262 stack.append(prec)
263
263
264 return exclmarkers
264 return exclmarkers
265
265
266 def foreground(repo, nodes):
266 def foreground(repo, nodes):
267 """return all nodes in the "foreground" of other node
267 """return all nodes in the "foreground" of other node
268
268
269 The foreground of a revision is anything reachable using parent -> children
269 The foreground of a revision is anything reachable using parent -> children
270 or precursor -> successor relation. It is very similar to "descendant" but
270 or precursor -> successor relation. It is very similar to "descendant" but
271 augmented with obsolescence information.
271 augmented with obsolescence information.
272
272
273 Beware that possible obsolescence cycle may result if complex situation.
273 Beware that possible obsolescence cycle may result if complex situation.
274 """
274 """
275 repo = repo.unfiltered()
275 repo = repo.unfiltered()
276 foreground = set(repo.set('%ln::', nodes))
276 foreground = set(repo.set('%ln::', nodes))
277 if repo.obsstore:
277 if repo.obsstore:
278 # We only need this complicated logic if there is obsolescence
278 # We only need this complicated logic if there is obsolescence
279 # XXX will probably deserve an optimised revset.
279 # XXX will probably deserve an optimised revset.
280 nm = repo.changelog.nodemap
280 nm = repo.changelog.nodemap
281 plen = -1
281 plen = -1
282 # compute the whole set of successors or descendants
282 # compute the whole set of successors or descendants
283 while len(foreground) != plen:
283 while len(foreground) != plen:
284 plen = len(foreground)
284 plen = len(foreground)
285 succs = set(c.node() for c in foreground)
285 succs = set(c.node() for c in foreground)
286 mutable = [c.node() for c in foreground if c.mutable()]
286 mutable = [c.node() for c in foreground if c.mutable()]
287 succs.update(allsuccessors(repo.obsstore, mutable))
287 succs.update(allsuccessors(repo.obsstore, mutable))
288 known = (n for n in succs if n in nm)
288 known = (n for n in succs if n in nm)
289 foreground = set(repo.set('%ln::', known))
289 foreground = set(repo.set('%ln::', known))
290 return set(c.node() for c in foreground)
290 return set(c.node() for c in foreground)
291
291
292 def getobsoleted(repo, tr):
292 def getobsoleted(repo, tr):
293 """return the set of pre-existing revisions obsoleted by a transaction"""
293 """return the set of pre-existing revisions obsoleted by a transaction"""
294 torev = repo.unfiltered().changelog.nodemap.get
294 torev = repo.unfiltered().changelog.nodemap.get
295 phase = repo._phasecache.phase
295 phase = repo._phasecache.phase
296 succsmarkers = repo.obsstore.successors.get
296 succsmarkers = repo.obsstore.successors.get
297 public = phases.public
297 public = phases.public
298 addedmarkers = tr.changes.get('obsmarkers')
298 addedmarkers = tr.changes.get('obsmarkers')
299 addedrevs = tr.changes.get('revs')
299 addedrevs = tr.changes.get('revs')
300 seenrevs = set(addedrevs)
300 seenrevs = set(addedrevs)
301 obsoleted = set()
301 obsoleted = set()
302 for mark in addedmarkers:
302 for mark in addedmarkers:
303 node = mark[0]
303 node = mark[0]
304 rev = torev(node)
304 rev = torev(node)
305 if rev is None or rev in seenrevs:
305 if rev is None or rev in seenrevs:
306 continue
306 continue
307 seenrevs.add(rev)
307 seenrevs.add(rev)
308 if phase(repo, rev) == public:
308 if phase(repo, rev) == public:
309 continue
309 continue
310 if set(succsmarkers(node)).issubset(addedmarkers):
310 if set(succsmarkers(node)).issubset(addedmarkers):
311 obsoleted.add(rev)
311 obsoleted.add(rev)
312 return obsoleted
312 return obsoleted
313
313
314 def successorssets(repo, initialnode, cache=None):
314 def successorssets(repo, initialnode, closest=False, cache=None):
315 """Return set of all latest successors of initial nodes
315 """Return set of all latest successors of initial nodes
316
316
317 The successors set of a changeset A are the group of revisions that succeed
317 The successors set of a changeset A are the group of revisions that succeed
318 A. It succeeds A as a consistent whole, each revision being only a partial
318 A. It succeeds A as a consistent whole, each revision being only a partial
319 replacement. The successors set contains non-obsolete changesets only.
319 replacement. By default, the successors set contains non-obsolete
320 changesets only, walking the obsolescence graph until reaching a leaf. If
321 'closest' is set to True, closest successors-sets are return (the
322 obsolescence walk stops on known changesets).
320
323
321 This function returns the full list of successor sets which is why it
324 This function returns the full list of successor sets which is why it
322 returns a list of tuples and not just a single tuple. Each tuple is a valid
325 returns a list of tuples and not just a single tuple. Each tuple is a valid
323 successors set. Note that (A,) may be a valid successors set for changeset A
326 successors set. Note that (A,) may be a valid successors set for changeset A
324 (see below).
327 (see below).
325
328
326 In most cases, a changeset A will have a single element (e.g. the changeset
329 In most cases, a changeset A will have a single element (e.g. the changeset
327 A is replaced by A') in its successors set. Though, it is also common for a
330 A is replaced by A') in its successors set. Though, it is also common for a
328 changeset A to have no elements in its successor set (e.g. the changeset
331 changeset A to have no elements in its successor set (e.g. the changeset
329 has been pruned). Therefore, the returned list of successors sets will be
332 has been pruned). Therefore, the returned list of successors sets will be
330 [(A',)] or [], respectively.
333 [(A',)] or [], respectively.
331
334
332 When a changeset A is split into A' and B', however, it will result in a
335 When a changeset A is split into A' and B', however, it will result in a
333 successors set containing more than a single element, i.e. [(A',B')].
336 successors set containing more than a single element, i.e. [(A',B')].
334 Divergent changesets will result in multiple successors sets, i.e. [(A',),
337 Divergent changesets will result in multiple successors sets, i.e. [(A',),
335 (A'')].
338 (A'')].
336
339
337 If a changeset A is not obsolete, then it will conceptually have no
340 If a changeset A is not obsolete, then it will conceptually have no
338 successors set. To distinguish this from a pruned changeset, the successor
341 successors set. To distinguish this from a pruned changeset, the successor
339 set will contain itself only, i.e. [(A,)].
342 set will contain itself only, i.e. [(A,)].
340
343
341 Finally, final successors unknown locally are considered to be pruned
344 Finally, final successors unknown locally are considered to be pruned
342 (pruned: obsoleted without any successors). (Final: successors not affected
345 (pruned: obsoleted without any successors). (Final: successors not affected
343 by markers).
346 by markers).
344
347
348 The 'closest' mode respect the repoview filtering. For example, without
349 filter it will stop at the first locally known changeset, with 'visible'
350 filter it will stop on visible changesets).
351
345 The optional `cache` parameter is a dictionary that may contains
352 The optional `cache` parameter is a dictionary that may contains
346 precomputed successors sets. It is meant to reuse the computation of a
353 precomputed successors sets. It is meant to reuse the computation of a
347 previous call to `successorssets` when multiple calls are made at the same
354 previous call to `successorssets` when multiple calls are made at the same
348 time. The cache dictionary is updated in place. The caller is responsible
355 time. The cache dictionary is updated in place. The caller is responsible
349 for its life span. Code that makes multiple calls to `successorssets`
356 for its life span. Code that makes multiple calls to `successorssets`
350 *should* use this cache mechanism or risk a performance hit.
357 *should* use this cache mechanism or risk a performance hit.
358
359 Since results are different depending of the 'closest' most, the same cache
360 cannot be reused for both mode.
351 """
361 """
352
362
353 succmarkers = repo.obsstore.successors
363 succmarkers = repo.obsstore.successors
354
364
355 # Stack of nodes we search successors sets for
365 # Stack of nodes we search successors sets for
356 toproceed = [initialnode]
366 toproceed = [initialnode]
357 # set version of above list for fast loop detection
367 # set version of above list for fast loop detection
358 # element added to "toproceed" must be added here
368 # element added to "toproceed" must be added here
359 stackedset = set(toproceed)
369 stackedset = set(toproceed)
360 if cache is None:
370 if cache is None:
361 cache = {}
371 cache = {}
362
372
363 # This while loop is the flattened version of a recursive search for
373 # This while loop is the flattened version of a recursive search for
364 # successors sets
374 # successors sets
365 #
375 #
366 # def successorssets(x):
376 # def successorssets(x):
367 # successors = directsuccessors(x)
377 # successors = directsuccessors(x)
368 # ss = [[]]
378 # ss = [[]]
369 # for succ in directsuccessors(x):
379 # for succ in directsuccessors(x):
370 # # product as in itertools cartesian product
380 # # product as in itertools cartesian product
371 # ss = product(ss, successorssets(succ))
381 # ss = product(ss, successorssets(succ))
372 # return ss
382 # return ss
373 #
383 #
374 # But we can not use plain recursive calls here:
384 # But we can not use plain recursive calls here:
375 # - that would blow the python call stack
385 # - that would blow the python call stack
376 # - obsolescence markers may have cycles, we need to handle them.
386 # - obsolescence markers may have cycles, we need to handle them.
377 #
387 #
378 # The `toproceed` list act as our call stack. Every node we search
388 # The `toproceed` list act as our call stack. Every node we search
379 # successors set for are stacked there.
389 # successors set for are stacked there.
380 #
390 #
381 # The `stackedset` is set version of this stack used to check if a node is
391 # The `stackedset` is set version of this stack used to check if a node is
382 # already stacked. This check is used to detect cycles and prevent infinite
392 # already stacked. This check is used to detect cycles and prevent infinite
383 # loop.
393 # loop.
384 #
394 #
385 # successors set of all nodes are stored in the `cache` dictionary.
395 # successors set of all nodes are stored in the `cache` dictionary.
386 #
396 #
387 # After this while loop ends we use the cache to return the successors sets
397 # After this while loop ends we use the cache to return the successors sets
388 # for the node requested by the caller.
398 # for the node requested by the caller.
389 while toproceed:
399 while toproceed:
390 # Every iteration tries to compute the successors sets of the topmost
400 # Every iteration tries to compute the successors sets of the topmost
391 # node of the stack: CURRENT.
401 # node of the stack: CURRENT.
392 #
402 #
393 # There are four possible outcomes:
403 # There are four possible outcomes:
394 #
404 #
395 # 1) We already know the successors sets of CURRENT:
405 # 1) We already know the successors sets of CURRENT:
396 # -> mission accomplished, pop it from the stack.
406 # -> mission accomplished, pop it from the stack.
397 # 2) Node is not obsolete:
407 # 2) Stop the walk:
408 # default case: Node is not obsolete
409 # closest case: Node is known at this repo filter level
398 # -> the node is its own successors sets. Add it to the cache.
410 # -> the node is its own successors sets. Add it to the cache.
399 # 3) We do not know successors set of direct successors of CURRENT:
411 # 3) We do not know successors set of direct successors of CURRENT:
400 # -> We add those successors to the stack.
412 # -> We add those successors to the stack.
401 # 4) We know successors sets of all direct successors of CURRENT:
413 # 4) We know successors sets of all direct successors of CURRENT:
402 # -> We can compute CURRENT successors set and add it to the
414 # -> We can compute CURRENT successors set and add it to the
403 # cache.
415 # cache.
404 #
416 #
405 current = toproceed[-1]
417 current = toproceed[-1]
418
419 # case 2 condition is a bit hairy because of closest,
420 # we compute it on its own
421 case2condition = ((current not in succmarkers)
422 or (closest and current != initialnode
423 and current in repo))
424
406 if current in cache:
425 if current in cache:
407 # case (1): We already know the successors sets
426 # case (1): We already know the successors sets
408 stackedset.remove(toproceed.pop())
427 stackedset.remove(toproceed.pop())
409 elif current not in succmarkers:
428 elif case2condition:
410 # case (2): The node is not obsolete.
429 # case (2): end of walk.
411 if current in repo:
430 if current in repo:
412 # We have a valid last successors.
431 # We have a valid successors.
413 cache[current] = [(current,)]
432 cache[current] = [(current,)]
414 else:
433 else:
415 # Final obsolete version is unknown locally.
434 # Final obsolete version is unknown locally.
416 # Do not count that as a valid successors
435 # Do not count that as a valid successors
417 cache[current] = []
436 cache[current] = []
418 else:
437 else:
419 # cases (3) and (4)
438 # cases (3) and (4)
420 #
439 #
421 # We proceed in two phases. Phase 1 aims to distinguish case (3)
440 # We proceed in two phases. Phase 1 aims to distinguish case (3)
422 # from case (4):
441 # from case (4):
423 #
442 #
424 # For each direct successors of CURRENT, we check whether its
443 # For each direct successors of CURRENT, we check whether its
425 # successors sets are known. If they are not, we stack the
444 # successors sets are known. If they are not, we stack the
426 # unknown node and proceed to the next iteration of the while
445 # unknown node and proceed to the next iteration of the while
427 # loop. (case 3)
446 # loop. (case 3)
428 #
447 #
429 # During this step, we may detect obsolescence cycles: a node
448 # During this step, we may detect obsolescence cycles: a node
430 # with unknown successors sets but already in the call stack.
449 # with unknown successors sets but already in the call stack.
431 # In such a situation, we arbitrary set the successors sets of
450 # In such a situation, we arbitrary set the successors sets of
432 # the node to nothing (node pruned) to break the cycle.
451 # the node to nothing (node pruned) to break the cycle.
433 #
452 #
434 # If no break was encountered we proceed to phase 2.
453 # If no break was encountered we proceed to phase 2.
435 #
454 #
436 # Phase 2 computes successors sets of CURRENT (case 4); see details
455 # Phase 2 computes successors sets of CURRENT (case 4); see details
437 # in phase 2 itself.
456 # in phase 2 itself.
438 #
457 #
439 # Note the two levels of iteration in each phase.
458 # Note the two levels of iteration in each phase.
440 # - The first one handles obsolescence markers using CURRENT as
459 # - The first one handles obsolescence markers using CURRENT as
441 # precursor (successors markers of CURRENT).
460 # precursor (successors markers of CURRENT).
442 #
461 #
443 # Having multiple entry here means divergence.
462 # Having multiple entry here means divergence.
444 #
463 #
445 # - The second one handles successors defined in each marker.
464 # - The second one handles successors defined in each marker.
446 #
465 #
447 # Having none means pruned node, multiple successors means split,
466 # Having none means pruned node, multiple successors means split,
448 # single successors are standard replacement.
467 # single successors are standard replacement.
449 #
468 #
450 for mark in sorted(succmarkers[current]):
469 for mark in sorted(succmarkers[current]):
451 for suc in mark[1]:
470 for suc in mark[1]:
452 if suc not in cache:
471 if suc not in cache:
453 if suc in stackedset:
472 if suc in stackedset:
454 # cycle breaking
473 # cycle breaking
455 cache[suc] = []
474 cache[suc] = []
456 else:
475 else:
457 # case (3) If we have not computed successors sets
476 # case (3) If we have not computed successors sets
458 # of one of those successors we add it to the
477 # of one of those successors we add it to the
459 # `toproceed` stack and stop all work for this
478 # `toproceed` stack and stop all work for this
460 # iteration.
479 # iteration.
461 toproceed.append(suc)
480 toproceed.append(suc)
462 stackedset.add(suc)
481 stackedset.add(suc)
463 break
482 break
464 else:
483 else:
465 continue
484 continue
466 break
485 break
467 else:
486 else:
468 # case (4): we know all successors sets of all direct
487 # case (4): we know all successors sets of all direct
469 # successors
488 # successors
470 #
489 #
471 # Successors set contributed by each marker depends on the
490 # Successors set contributed by each marker depends on the
472 # successors sets of all its "successors" node.
491 # successors sets of all its "successors" node.
473 #
492 #
474 # Each different marker is a divergence in the obsolescence
493 # Each different marker is a divergence in the obsolescence
475 # history. It contributes successors sets distinct from other
494 # history. It contributes successors sets distinct from other
476 # markers.
495 # markers.
477 #
496 #
478 # Within a marker, a successor may have divergent successors
497 # Within a marker, a successor may have divergent successors
479 # sets. In such a case, the marker will contribute multiple
498 # sets. In such a case, the marker will contribute multiple
480 # divergent successors sets. If multiple successors have
499 # divergent successors sets. If multiple successors have
481 # divergent successors sets, a Cartesian product is used.
500 # divergent successors sets, a Cartesian product is used.
482 #
501 #
483 # At the end we post-process successors sets to remove
502 # At the end we post-process successors sets to remove
484 # duplicated entry and successors set that are strict subset of
503 # duplicated entry and successors set that are strict subset of
485 # another one.
504 # another one.
486 succssets = []
505 succssets = []
487 for mark in sorted(succmarkers[current]):
506 for mark in sorted(succmarkers[current]):
488 # successors sets contributed by this marker
507 # successors sets contributed by this marker
489 markss = [[]]
508 markss = [[]]
490 for suc in mark[1]:
509 for suc in mark[1]:
491 # cardinal product with previous successors
510 # cardinal product with previous successors
492 productresult = []
511 productresult = []
493 for prefix in markss:
512 for prefix in markss:
494 for suffix in cache[suc]:
513 for suffix in cache[suc]:
495 newss = list(prefix)
514 newss = list(prefix)
496 for part in suffix:
515 for part in suffix:
497 # do not duplicated entry in successors set
516 # do not duplicated entry in successors set
498 # first entry wins.
517 # first entry wins.
499 if part not in newss:
518 if part not in newss:
500 newss.append(part)
519 newss.append(part)
501 productresult.append(newss)
520 productresult.append(newss)
502 markss = productresult
521 markss = productresult
503 succssets.extend(markss)
522 succssets.extend(markss)
504 # remove duplicated and subset
523 # remove duplicated and subset
505 seen = []
524 seen = []
506 final = []
525 final = []
507 candidate = sorted(((set(s), s) for s in succssets if s),
526 candidate = sorted(((set(s), s) for s in succssets if s),
508 key=lambda x: len(x[1]), reverse=True)
527 key=lambda x: len(x[1]), reverse=True)
509 for setversion, listversion in candidate:
528 for setversion, listversion in candidate:
510 for seenset in seen:
529 for seenset in seen:
511 if setversion.issubset(seenset):
530 if setversion.issubset(seenset):
512 break
531 break
513 else:
532 else:
514 final.append(listversion)
533 final.append(listversion)
515 seen.append(setversion)
534 seen.append(setversion)
516 final.reverse() # put small successors set first
535 final.reverse() # put small successors set first
517 cache[current] = final
536 cache[current] = final
518 return cache[initialnode]
537 return cache[initialnode]
@@ -1,381 +1,381
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 add
3 add
4 addremove
4 addremove
5 annotate
5 annotate
6 archive
6 archive
7 backout
7 backout
8 bisect
8 bisect
9 bookmarks
9 bookmarks
10 branch
10 branch
11 branches
11 branches
12 bundle
12 bundle
13 cat
13 cat
14 clone
14 clone
15 commit
15 commit
16 config
16 config
17 copy
17 copy
18 diff
18 diff
19 export
19 export
20 files
20 files
21 forget
21 forget
22 graft
22 graft
23 grep
23 grep
24 heads
24 heads
25 help
25 help
26 identify
26 identify
27 import
27 import
28 incoming
28 incoming
29 init
29 init
30 locate
30 locate
31 log
31 log
32 manifest
32 manifest
33 merge
33 merge
34 outgoing
34 outgoing
35 parents
35 parents
36 paths
36 paths
37 phase
37 phase
38 pull
38 pull
39 push
39 push
40 recover
40 recover
41 remove
41 remove
42 rename
42 rename
43 resolve
43 resolve
44 revert
44 revert
45 rollback
45 rollback
46 root
46 root
47 serve
47 serve
48 status
48 status
49 summary
49 summary
50 tag
50 tag
51 tags
51 tags
52 tip
52 tip
53 unbundle
53 unbundle
54 update
54 update
55 verify
55 verify
56 version
56 version
57
57
58 Show all commands that start with "a"
58 Show all commands that start with "a"
59 $ hg debugcomplete a
59 $ hg debugcomplete a
60 add
60 add
61 addremove
61 addremove
62 annotate
62 annotate
63 archive
63 archive
64
64
65 Do not show debug commands if there are other candidates
65 Do not show debug commands if there are other candidates
66 $ hg debugcomplete d
66 $ hg debugcomplete d
67 diff
67 diff
68
68
69 Show debug commands if there are no other candidates
69 Show debug commands if there are no other candidates
70 $ hg debugcomplete debug
70 $ hg debugcomplete debug
71 debugancestor
71 debugancestor
72 debugapplystreamclonebundle
72 debugapplystreamclonebundle
73 debugbuilddag
73 debugbuilddag
74 debugbundle
74 debugbundle
75 debugcheckstate
75 debugcheckstate
76 debugcolor
76 debugcolor
77 debugcommands
77 debugcommands
78 debugcomplete
78 debugcomplete
79 debugconfig
79 debugconfig
80 debugcreatestreamclonebundle
80 debugcreatestreamclonebundle
81 debugdag
81 debugdag
82 debugdata
82 debugdata
83 debugdate
83 debugdate
84 debugdeltachain
84 debugdeltachain
85 debugdirstate
85 debugdirstate
86 debugdiscovery
86 debugdiscovery
87 debugextensions
87 debugextensions
88 debugfileset
88 debugfileset
89 debugfsinfo
89 debugfsinfo
90 debuggetbundle
90 debuggetbundle
91 debugignore
91 debugignore
92 debugindex
92 debugindex
93 debugindexdot
93 debugindexdot
94 debuginstall
94 debuginstall
95 debugknown
95 debugknown
96 debuglabelcomplete
96 debuglabelcomplete
97 debuglocks
97 debuglocks
98 debugmergestate
98 debugmergestate
99 debugnamecomplete
99 debugnamecomplete
100 debugobsolete
100 debugobsolete
101 debugpathcomplete
101 debugpathcomplete
102 debugpickmergetool
102 debugpickmergetool
103 debugpushkey
103 debugpushkey
104 debugpvec
104 debugpvec
105 debugrebuilddirstate
105 debugrebuilddirstate
106 debugrebuildfncache
106 debugrebuildfncache
107 debugrename
107 debugrename
108 debugrevlog
108 debugrevlog
109 debugrevspec
109 debugrevspec
110 debugsetparents
110 debugsetparents
111 debugsub
111 debugsub
112 debugsuccessorssets
112 debugsuccessorssets
113 debugtemplate
113 debugtemplate
114 debugupdatecaches
114 debugupdatecaches
115 debugupgraderepo
115 debugupgraderepo
116 debugwalk
116 debugwalk
117 debugwireargs
117 debugwireargs
118
118
119 Do not show the alias of a debug command if there are other candidates
119 Do not show the alias of a debug command if there are other candidates
120 (this should hide rawcommit)
120 (this should hide rawcommit)
121 $ hg debugcomplete r
121 $ hg debugcomplete r
122 recover
122 recover
123 remove
123 remove
124 rename
124 rename
125 resolve
125 resolve
126 revert
126 revert
127 rollback
127 rollback
128 root
128 root
129 Show the alias of a debug command if there are no other candidates
129 Show the alias of a debug command if there are no other candidates
130 $ hg debugcomplete rawc
130 $ hg debugcomplete rawc
131
131
132
132
133 Show the global options
133 Show the global options
134 $ hg debugcomplete --options | sort
134 $ hg debugcomplete --options | sort
135 --color
135 --color
136 --config
136 --config
137 --cwd
137 --cwd
138 --debug
138 --debug
139 --debugger
139 --debugger
140 --encoding
140 --encoding
141 --encodingmode
141 --encodingmode
142 --help
142 --help
143 --hidden
143 --hidden
144 --noninteractive
144 --noninteractive
145 --pager
145 --pager
146 --profile
146 --profile
147 --quiet
147 --quiet
148 --repository
148 --repository
149 --time
149 --time
150 --traceback
150 --traceback
151 --verbose
151 --verbose
152 --version
152 --version
153 -R
153 -R
154 -h
154 -h
155 -q
155 -q
156 -v
156 -v
157 -y
157 -y
158
158
159 Show the options for the "serve" command
159 Show the options for the "serve" command
160 $ hg debugcomplete --options serve | sort
160 $ hg debugcomplete --options serve | sort
161 --accesslog
161 --accesslog
162 --address
162 --address
163 --certificate
163 --certificate
164 --cmdserver
164 --cmdserver
165 --color
165 --color
166 --config
166 --config
167 --cwd
167 --cwd
168 --daemon
168 --daemon
169 --daemon-postexec
169 --daemon-postexec
170 --debug
170 --debug
171 --debugger
171 --debugger
172 --encoding
172 --encoding
173 --encodingmode
173 --encodingmode
174 --errorlog
174 --errorlog
175 --help
175 --help
176 --hidden
176 --hidden
177 --ipv6
177 --ipv6
178 --name
178 --name
179 --noninteractive
179 --noninteractive
180 --pager
180 --pager
181 --pid-file
181 --pid-file
182 --port
182 --port
183 --prefix
183 --prefix
184 --profile
184 --profile
185 --quiet
185 --quiet
186 --repository
186 --repository
187 --stdio
187 --stdio
188 --style
188 --style
189 --subrepos
189 --subrepos
190 --templates
190 --templates
191 --time
191 --time
192 --traceback
192 --traceback
193 --verbose
193 --verbose
194 --version
194 --version
195 --web-conf
195 --web-conf
196 -6
196 -6
197 -A
197 -A
198 -E
198 -E
199 -R
199 -R
200 -S
200 -S
201 -a
201 -a
202 -d
202 -d
203 -h
203 -h
204 -n
204 -n
205 -p
205 -p
206 -q
206 -q
207 -t
207 -t
208 -v
208 -v
209 -y
209 -y
210
210
211 Show an error if we use --options with an ambiguous abbreviation
211 Show an error if we use --options with an ambiguous abbreviation
212 $ hg debugcomplete --options s
212 $ hg debugcomplete --options s
213 hg: command 's' is ambiguous:
213 hg: command 's' is ambiguous:
214 serve showconfig status summary
214 serve showconfig status summary
215 [255]
215 [255]
216
216
217 Show all commands + options
217 Show all commands + options
218 $ hg debugcommands
218 $ hg debugcommands
219 add: include, exclude, subrepos, dry-run
219 add: include, exclude, subrepos, dry-run
220 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
220 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
221 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
221 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
222 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
222 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
223 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
223 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
224 export: output, switch-parent, rev, text, git, binary, nodates
224 export: output, switch-parent, rev, text, git, binary, nodates
225 forget: include, exclude
225 forget: include, exclude
226 init: ssh, remotecmd, insecure
226 init: ssh, remotecmd, insecure
227 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
227 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
228 merge: force, rev, preview, tool
228 merge: force, rev, preview, tool
229 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
229 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
230 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
230 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
231 remove: after, force, subrepos, include, exclude
231 remove: after, force, subrepos, include, exclude
232 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
232 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
233 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
233 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
234 summary: remote
234 summary: remote
235 update: clean, check, merge, date, rev, tool
235 update: clean, check, merge, date, rev, tool
236 addremove: similarity, subrepos, include, exclude, dry-run
236 addremove: similarity, subrepos, include, exclude, dry-run
237 archive: no-decode, prefix, rev, type, subrepos, include, exclude
237 archive: no-decode, prefix, rev, type, subrepos, include, exclude
238 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
238 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
239 bisect: reset, good, bad, skip, extend, command, noupdate
239 bisect: reset, good, bad, skip, extend, command, noupdate
240 bookmarks: force, rev, delete, rename, inactive, template
240 bookmarks: force, rev, delete, rename, inactive, template
241 branch: force, clean
241 branch: force, clean
242 branches: active, closed, template
242 branches: active, closed, template
243 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
243 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
244 cat: output, rev, decode, include, exclude, template
244 cat: output, rev, decode, include, exclude, template
245 config: untrusted, edit, local, global, template
245 config: untrusted, edit, local, global, template
246 copy: after, force, include, exclude, dry-run
246 copy: after, force, include, exclude, dry-run
247 debugancestor:
247 debugancestor:
248 debugapplystreamclonebundle:
248 debugapplystreamclonebundle:
249 debugbuilddag: mergeable-file, overwritten-file, new-file
249 debugbuilddag: mergeable-file, overwritten-file, new-file
250 debugbundle: all, part-type, spec
250 debugbundle: all, part-type, spec
251 debugcheckstate:
251 debugcheckstate:
252 debugcolor: style
252 debugcolor: style
253 debugcommands:
253 debugcommands:
254 debugcomplete: options
254 debugcomplete: options
255 debugcreatestreamclonebundle:
255 debugcreatestreamclonebundle:
256 debugdag: tags, branches, dots, spaces
256 debugdag: tags, branches, dots, spaces
257 debugdata: changelog, manifest, dir
257 debugdata: changelog, manifest, dir
258 debugdate: extended
258 debugdate: extended
259 debugdeltachain: changelog, manifest, dir, template
259 debugdeltachain: changelog, manifest, dir, template
260 debugdirstate: nodates, datesort
260 debugdirstate: nodates, datesort
261 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
261 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
262 debugextensions: template
262 debugextensions: template
263 debugfileset: rev
263 debugfileset: rev
264 debugfsinfo:
264 debugfsinfo:
265 debuggetbundle: head, common, type
265 debuggetbundle: head, common, type
266 debugignore:
266 debugignore:
267 debugindex: changelog, manifest, dir, format
267 debugindex: changelog, manifest, dir, format
268 debugindexdot: changelog, manifest, dir
268 debugindexdot: changelog, manifest, dir
269 debuginstall: template
269 debuginstall: template
270 debugknown:
270 debugknown:
271 debuglabelcomplete:
271 debuglabelcomplete:
272 debuglocks: force-lock, force-wlock
272 debuglocks: force-lock, force-wlock
273 debugmergestate:
273 debugmergestate:
274 debugnamecomplete:
274 debugnamecomplete:
275 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
275 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
276 debugpathcomplete: full, normal, added, removed
276 debugpathcomplete: full, normal, added, removed
277 debugpickmergetool: rev, changedelete, include, exclude, tool
277 debugpickmergetool: rev, changedelete, include, exclude, tool
278 debugpushkey:
278 debugpushkey:
279 debugpvec:
279 debugpvec:
280 debugrebuilddirstate: rev, minimal
280 debugrebuilddirstate: rev, minimal
281 debugrebuildfncache:
281 debugrebuildfncache:
282 debugrename: rev
282 debugrename: rev
283 debugrevlog: changelog, manifest, dir, dump
283 debugrevlog: changelog, manifest, dir, dump
284 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
284 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
285 debugsetparents:
285 debugsetparents:
286 debugsub: rev
286 debugsub: rev
287 debugsuccessorssets:
287 debugsuccessorssets: closest
288 debugtemplate: rev, define
288 debugtemplate: rev, define
289 debugupdatecaches:
289 debugupdatecaches:
290 debugupgraderepo: optimize, run
290 debugupgraderepo: optimize, run
291 debugwalk: include, exclude
291 debugwalk: include, exclude
292 debugwireargs: three, four, five, ssh, remotecmd, insecure
292 debugwireargs: three, four, five, ssh, remotecmd, insecure
293 files: rev, print0, include, exclude, template, subrepos
293 files: rev, print0, include, exclude, template, subrepos
294 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
294 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
295 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
295 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
296 heads: rev, topo, active, closed, style, template
296 heads: rev, topo, active, closed, style, template
297 help: extension, command, keyword, system
297 help: extension, command, keyword, system
298 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
298 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
299 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
299 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
300 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
300 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
301 locate: rev, print0, fullpath, include, exclude
301 locate: rev, print0, fullpath, include, exclude
302 manifest: rev, all, template
302 manifest: rev, all, template
303 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
303 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
304 parents: rev, style, template
304 parents: rev, style, template
305 paths: template
305 paths: template
306 phase: public, draft, secret, force, rev
306 phase: public, draft, secret, force, rev
307 recover:
307 recover:
308 rename: after, force, include, exclude, dry-run
308 rename: after, force, include, exclude, dry-run
309 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
309 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
310 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
310 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
311 rollback: dry-run, force
311 rollback: dry-run, force
312 root:
312 root:
313 tag: force, local, rev, remove, edit, message, date, user
313 tag: force, local, rev, remove, edit, message, date, user
314 tags: template
314 tags: template
315 tip: patch, git, style, template
315 tip: patch, git, style, template
316 unbundle: update
316 unbundle: update
317 verify:
317 verify:
318 version: template
318 version: template
319
319
320 $ hg init a
320 $ hg init a
321 $ cd a
321 $ cd a
322 $ echo fee > fee
322 $ echo fee > fee
323 $ hg ci -q -Amfee
323 $ hg ci -q -Amfee
324 $ hg tag fee
324 $ hg tag fee
325 $ mkdir fie
325 $ mkdir fie
326 $ echo dead > fie/dead
326 $ echo dead > fie/dead
327 $ echo live > fie/live
327 $ echo live > fie/live
328 $ hg bookmark fo
328 $ hg bookmark fo
329 $ hg branch -q fie
329 $ hg branch -q fie
330 $ hg ci -q -Amfie
330 $ hg ci -q -Amfie
331 $ echo fo > fo
331 $ echo fo > fo
332 $ hg branch -qf default
332 $ hg branch -qf default
333 $ hg ci -q -Amfo
333 $ hg ci -q -Amfo
334 $ echo Fum > Fum
334 $ echo Fum > Fum
335 $ hg ci -q -AmFum
335 $ hg ci -q -AmFum
336 $ hg bookmark Fum
336 $ hg bookmark Fum
337
337
338 Test debugpathcomplete
338 Test debugpathcomplete
339
339
340 $ hg debugpathcomplete f
340 $ hg debugpathcomplete f
341 fee
341 fee
342 fie
342 fie
343 fo
343 fo
344 $ hg debugpathcomplete -f f
344 $ hg debugpathcomplete -f f
345 fee
345 fee
346 fie/dead
346 fie/dead
347 fie/live
347 fie/live
348 fo
348 fo
349
349
350 $ hg rm Fum
350 $ hg rm Fum
351 $ hg debugpathcomplete -r F
351 $ hg debugpathcomplete -r F
352 Fum
352 Fum
353
353
354 Test debugnamecomplete
354 Test debugnamecomplete
355
355
356 $ hg debugnamecomplete
356 $ hg debugnamecomplete
357 Fum
357 Fum
358 default
358 default
359 fee
359 fee
360 fie
360 fie
361 fo
361 fo
362 tip
362 tip
363 $ hg debugnamecomplete f
363 $ hg debugnamecomplete f
364 fee
364 fee
365 fie
365 fie
366 fo
366 fo
367
367
368 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
368 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
369 used for completions in some shells.
369 used for completions in some shells.
370
370
371 $ hg debuglabelcomplete
371 $ hg debuglabelcomplete
372 Fum
372 Fum
373 default
373 default
374 fee
374 fee
375 fie
375 fie
376 fo
376 fo
377 tip
377 tip
378 $ hg debuglabelcomplete f
378 $ hg debuglabelcomplete f
379 fee
379 fee
380 fie
380 fie
381 fo
381 fo
@@ -1,437 +1,619
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2
2
3 This is the most complex troubles from far so we isolate it in a dedicated
3 This is the most complex troubles from far so we isolate it in a dedicated
4 file.
4 file.
5
5
6 Enable obsolete
6 Enable obsolete
7
7
8 $ cat >> $HGRCPATH << EOF
8 $ cat >> $HGRCPATH << EOF
9 > [ui]
9 > [ui]
10 > logtemplate = {rev}:{node|short} {desc}\n
10 > logtemplate = {rev}:{node|short} {desc}\n
11 > [experimental]
11 > [experimental]
12 > evolution=createmarkers
12 > evolution=createmarkers
13 > [alias]
13 > [alias]
14 > debugobsolete = debugobsolete -d '0 0'
14 > debugobsolete = debugobsolete -d '0 0'
15 > [phases]
15 > [phases]
16 > publish=False
16 > publish=False
17 > EOF
17 > EOF
18
18
19
19
20 $ mkcommit() {
20 $ mkcommit() {
21 > echo "$1" > "$1"
21 > echo "$1" > "$1"
22 > hg add "$1"
22 > hg add "$1"
23 > hg ci -m "$1"
23 > hg ci -m "$1"
24 > }
24 > }
25 $ getid() {
25 $ getid() {
26 > hg log --hidden -r "desc('$1')" -T '{node}\n'
26 > hg log --hidden -r "desc('$1')" -T '{node}\n'
27 > }
27 > }
28
28
29 setup repo
29 setup repo
30
30
31 $ hg init reference
31 $ hg init reference
32 $ cd reference
32 $ cd reference
33 $ mkcommit base
33 $ mkcommit base
34 $ mkcommit A_0
34 $ mkcommit A_0
35 $ hg up 0
35 $ hg up 0
36 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
36 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
37 $ mkcommit A_1
37 $ mkcommit A_1
38 created new head
38 created new head
39 $ hg up 0
39 $ hg up 0
40 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
40 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
41 $ mkcommit A_2
41 $ mkcommit A_2
42 created new head
42 created new head
43 $ hg up 0
43 $ hg up 0
44 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
44 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
45 $ cd ..
45 $ cd ..
46
46
47
47
48 $ newcase() {
48 $ newcase() {
49 > hg clone -u 0 -q reference $1
49 > hg clone -u 0 -q reference $1
50 > cd $1
50 > cd $1
51 > }
51 > }
52
52
53 direct divergence
53 direct divergence
54 -----------------
54 -----------------
55
55
56 A_1 have two direct and divergent successors A_1 and A_1
56 A_1 have two direct and divergent successors A_1 and A_1
57
57
58 $ newcase direct
58 $ newcase direct
59 $ hg debugobsolete `getid A_0` `getid A_1`
59 $ hg debugobsolete `getid A_0` `getid A_1`
60 $ hg debugobsolete `getid A_0` `getid A_2`
60 $ hg debugobsolete `getid A_0` `getid A_2`
61 $ hg log -G --hidden
61 $ hg log -G --hidden
62 o 3:392fd25390da A_2
62 o 3:392fd25390da A_2
63 |
63 |
64 | o 2:82623d38b9ba A_1
64 | o 2:82623d38b9ba A_1
65 |/
65 |/
66 | x 1:007dc284c1f8 A_0
66 | x 1:007dc284c1f8 A_0
67 |/
67 |/
68 @ 0:d20a80d4def3 base
68 @ 0:d20a80d4def3 base
69
69
70 $ hg debugsuccessorssets --hidden 'all()'
70 $ hg debugsuccessorssets --hidden 'all()'
71 d20a80d4def3
71 d20a80d4def3
72 d20a80d4def3
72 d20a80d4def3
73 007dc284c1f8
73 007dc284c1f8
74 82623d38b9ba
74 82623d38b9ba
75 392fd25390da
75 392fd25390da
76 82623d38b9ba
76 82623d38b9ba
77 82623d38b9ba
77 82623d38b9ba
78 392fd25390da
78 392fd25390da
79 392fd25390da
79 392fd25390da
80 $ hg log -r 'divergent()'
80 $ hg log -r 'divergent()'
81 2:82623d38b9ba A_1
81 2:82623d38b9ba A_1
82 3:392fd25390da A_2
82 3:392fd25390da A_2
83 $ hg debugsuccessorssets 'all()' --closest
84 d20a80d4def3
85 d20a80d4def3
86 82623d38b9ba
87 82623d38b9ba
88 392fd25390da
89 392fd25390da
90 $ hg debugsuccessorssets 'all()' --closest --hidden
91 d20a80d4def3
92 d20a80d4def3
93 007dc284c1f8
94 82623d38b9ba
95 392fd25390da
96 82623d38b9ba
97 82623d38b9ba
98 392fd25390da
99 392fd25390da
83
100
84 check that mercurial refuse to push
101 check that mercurial refuse to push
85
102
86 $ hg init ../other
103 $ hg init ../other
87 $ hg push ../other
104 $ hg push ../other
88 pushing to ../other
105 pushing to ../other
89 searching for changes
106 searching for changes
90 abort: push includes divergent changeset: 392fd25390da!
107 abort: push includes divergent changeset: 392fd25390da!
91 [255]
108 [255]
92
109
93 $ cd ..
110 $ cd ..
94
111
95
112
96 indirect divergence with known changeset
113 indirect divergence with known changeset
97 -------------------------------------------
114 -------------------------------------------
98
115
99 $ newcase indirect_known
116 $ newcase indirect_known
100 $ hg debugobsolete `getid A_0` `getid A_1`
117 $ hg debugobsolete `getid A_0` `getid A_1`
101 $ hg debugobsolete `getid A_0` `getid A_2`
118 $ hg debugobsolete `getid A_0` `getid A_2`
102 $ mkcommit A_3
119 $ mkcommit A_3
103 created new head
120 created new head
104 $ hg debugobsolete `getid A_2` `getid A_3`
121 $ hg debugobsolete `getid A_2` `getid A_3`
105 $ hg log -G --hidden
122 $ hg log -G --hidden
106 @ 4:01f36c5a8fda A_3
123 @ 4:01f36c5a8fda A_3
107 |
124 |
108 | x 3:392fd25390da A_2
125 | x 3:392fd25390da A_2
109 |/
126 |/
110 | o 2:82623d38b9ba A_1
127 | o 2:82623d38b9ba A_1
111 |/
128 |/
112 | x 1:007dc284c1f8 A_0
129 | x 1:007dc284c1f8 A_0
113 |/
130 |/
114 o 0:d20a80d4def3 base
131 o 0:d20a80d4def3 base
115
132
116 $ hg debugsuccessorssets --hidden 'all()'
133 $ hg debugsuccessorssets --hidden 'all()'
117 d20a80d4def3
134 d20a80d4def3
118 d20a80d4def3
135 d20a80d4def3
119 007dc284c1f8
136 007dc284c1f8
120 82623d38b9ba
137 82623d38b9ba
121 01f36c5a8fda
138 01f36c5a8fda
122 82623d38b9ba
139 82623d38b9ba
123 82623d38b9ba
140 82623d38b9ba
124 392fd25390da
141 392fd25390da
125 01f36c5a8fda
142 01f36c5a8fda
126 01f36c5a8fda
143 01f36c5a8fda
127 01f36c5a8fda
144 01f36c5a8fda
128 $ hg log -r 'divergent()'
145 $ hg log -r 'divergent()'
129 2:82623d38b9ba A_1
146 2:82623d38b9ba A_1
130 4:01f36c5a8fda A_3
147 4:01f36c5a8fda A_3
148 $ hg debugsuccessorssets 'all()' --closest
149 d20a80d4def3
150 d20a80d4def3
151 82623d38b9ba
152 82623d38b9ba
153 01f36c5a8fda
154 01f36c5a8fda
155 $ hg debugsuccessorssets 'all()' --closest --hidden
156 d20a80d4def3
157 d20a80d4def3
158 007dc284c1f8
159 82623d38b9ba
160 392fd25390da
161 82623d38b9ba
162 82623d38b9ba
163 392fd25390da
164 392fd25390da
165 01f36c5a8fda
166 01f36c5a8fda
131 $ cd ..
167 $ cd ..
132
168
133
169
134 indirect divergence with known changeset
170 indirect divergence with known changeset
135 -------------------------------------------
171 -------------------------------------------
136
172
137 $ newcase indirect_unknown
173 $ newcase indirect_unknown
138 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
174 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
139 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
175 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
140 $ hg debugobsolete `getid A_0` `getid A_2`
176 $ hg debugobsolete `getid A_0` `getid A_2`
141 $ hg log -G --hidden
177 $ hg log -G --hidden
142 o 3:392fd25390da A_2
178 o 3:392fd25390da A_2
143 |
179 |
144 | o 2:82623d38b9ba A_1
180 | o 2:82623d38b9ba A_1
145 |/
181 |/
146 | x 1:007dc284c1f8 A_0
182 | x 1:007dc284c1f8 A_0
147 |/
183 |/
148 @ 0:d20a80d4def3 base
184 @ 0:d20a80d4def3 base
149
185
150 $ hg debugsuccessorssets --hidden 'all()'
186 $ hg debugsuccessorssets --hidden 'all()'
151 d20a80d4def3
187 d20a80d4def3
152 d20a80d4def3
188 d20a80d4def3
153 007dc284c1f8
189 007dc284c1f8
154 82623d38b9ba
190 82623d38b9ba
155 392fd25390da
191 392fd25390da
156 82623d38b9ba
192 82623d38b9ba
157 82623d38b9ba
193 82623d38b9ba
158 392fd25390da
194 392fd25390da
159 392fd25390da
195 392fd25390da
160 $ hg log -r 'divergent()'
196 $ hg log -r 'divergent()'
161 2:82623d38b9ba A_1
197 2:82623d38b9ba A_1
162 3:392fd25390da A_2
198 3:392fd25390da A_2
199 $ hg debugsuccessorssets 'all()' --closest
200 d20a80d4def3
201 d20a80d4def3
202 82623d38b9ba
203 82623d38b9ba
204 392fd25390da
205 392fd25390da
206 $ hg debugsuccessorssets 'all()' --closest --hidden
207 d20a80d4def3
208 d20a80d4def3
209 007dc284c1f8
210 82623d38b9ba
211 392fd25390da
212 82623d38b9ba
213 82623d38b9ba
214 392fd25390da
215 392fd25390da
163 $ cd ..
216 $ cd ..
164
217
165 do not take unknown node in account if they are final
218 do not take unknown node in account if they are final
166 -----------------------------------------------------
219 -----------------------------------------------------
167
220
168 $ newcase final-unknown
221 $ newcase final-unknown
169 $ hg debugobsolete `getid A_0` `getid A_1`
222 $ hg debugobsolete `getid A_0` `getid A_1`
170 $ hg debugobsolete `getid A_1` `getid A_2`
223 $ hg debugobsolete `getid A_1` `getid A_2`
171 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
224 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
172 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
225 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
173 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
226 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
174
227
175 $ hg debugsuccessorssets --hidden 'desc('A_0')'
228 $ hg debugsuccessorssets --hidden 'desc('A_0')'
176 007dc284c1f8
229 007dc284c1f8
177 392fd25390da
230 392fd25390da
231 $ hg debugsuccessorssets 'desc('A_0')' --closest
232 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
233 007dc284c1f8
234 82623d38b9ba
178
235
179 $ cd ..
236 $ cd ..
180
237
181 divergence that converge again is not divergence anymore
238 divergence that converge again is not divergence anymore
182 -----------------------------------------------------
239 -----------------------------------------------------
183
240
184 $ newcase converged_divergence
241 $ newcase converged_divergence
185 $ hg debugobsolete `getid A_0` `getid A_1`
242 $ hg debugobsolete `getid A_0` `getid A_1`
186 $ hg debugobsolete `getid A_0` `getid A_2`
243 $ hg debugobsolete `getid A_0` `getid A_2`
187 $ mkcommit A_3
244 $ mkcommit A_3
188 created new head
245 created new head
189 $ hg debugobsolete `getid A_1` `getid A_3`
246 $ hg debugobsolete `getid A_1` `getid A_3`
190 $ hg debugobsolete `getid A_2` `getid A_3`
247 $ hg debugobsolete `getid A_2` `getid A_3`
191 $ hg log -G --hidden
248 $ hg log -G --hidden
192 @ 4:01f36c5a8fda A_3
249 @ 4:01f36c5a8fda A_3
193 |
250 |
194 | x 3:392fd25390da A_2
251 | x 3:392fd25390da A_2
195 |/
252 |/
196 | x 2:82623d38b9ba A_1
253 | x 2:82623d38b9ba A_1
197 |/
254 |/
198 | x 1:007dc284c1f8 A_0
255 | x 1:007dc284c1f8 A_0
199 |/
256 |/
200 o 0:d20a80d4def3 base
257 o 0:d20a80d4def3 base
201
258
202 $ hg debugsuccessorssets --hidden 'all()'
259 $ hg debugsuccessorssets --hidden 'all()'
203 d20a80d4def3
260 d20a80d4def3
204 d20a80d4def3
261 d20a80d4def3
205 007dc284c1f8
262 007dc284c1f8
206 01f36c5a8fda
263 01f36c5a8fda
207 82623d38b9ba
264 82623d38b9ba
208 01f36c5a8fda
265 01f36c5a8fda
209 392fd25390da
266 392fd25390da
210 01f36c5a8fda
267 01f36c5a8fda
211 01f36c5a8fda
268 01f36c5a8fda
212 01f36c5a8fda
269 01f36c5a8fda
213 $ hg log -r 'divergent()'
270 $ hg log -r 'divergent()'
271 $ hg debugsuccessorssets 'all()' --closest
272 d20a80d4def3
273 d20a80d4def3
274 01f36c5a8fda
275 01f36c5a8fda
276 $ hg debugsuccessorssets 'all()' --closest --hidden
277 d20a80d4def3
278 d20a80d4def3
279 007dc284c1f8
280 82623d38b9ba
281 392fd25390da
282 82623d38b9ba
283 82623d38b9ba
284 392fd25390da
285 392fd25390da
286 01f36c5a8fda
287 01f36c5a8fda
214 $ cd ..
288 $ cd ..
215
289
216 split is not divergences
290 split is not divergences
217 -----------------------------
291 -----------------------------
218
292
219 $ newcase split
293 $ newcase split
220 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
294 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
221 $ hg log -G --hidden
295 $ hg log -G --hidden
222 o 3:392fd25390da A_2
296 o 3:392fd25390da A_2
223 |
297 |
224 | o 2:82623d38b9ba A_1
298 | o 2:82623d38b9ba A_1
225 |/
299 |/
226 | x 1:007dc284c1f8 A_0
300 | x 1:007dc284c1f8 A_0
227 |/
301 |/
228 @ 0:d20a80d4def3 base
302 @ 0:d20a80d4def3 base
229
303
230 $ hg debugsuccessorssets --hidden 'all()'
304 $ hg debugsuccessorssets --hidden 'all()'
231 d20a80d4def3
305 d20a80d4def3
232 d20a80d4def3
306 d20a80d4def3
233 007dc284c1f8
307 007dc284c1f8
234 82623d38b9ba 392fd25390da
308 82623d38b9ba 392fd25390da
235 82623d38b9ba
309 82623d38b9ba
236 82623d38b9ba
310 82623d38b9ba
237 392fd25390da
311 392fd25390da
238 392fd25390da
312 392fd25390da
239 $ hg log -r 'divergent()'
313 $ hg log -r 'divergent()'
314 $ hg debugsuccessorssets 'all()' --closest
315 d20a80d4def3
316 d20a80d4def3
317 82623d38b9ba
318 82623d38b9ba
319 392fd25390da
320 392fd25390da
321 $ hg debugsuccessorssets 'all()' --closest --hidden
322 d20a80d4def3
323 d20a80d4def3
324 007dc284c1f8
325 82623d38b9ba 392fd25390da
326 82623d38b9ba
327 82623d38b9ba
328 392fd25390da
329 392fd25390da
240
330
241 Even when subsequent rewriting happen
331 Even when subsequent rewriting happen
242
332
243 $ mkcommit A_3
333 $ mkcommit A_3
244 created new head
334 created new head
245 $ hg debugobsolete `getid A_1` `getid A_3`
335 $ hg debugobsolete `getid A_1` `getid A_3`
246 $ hg up 0
336 $ hg up 0
247 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
337 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
248 $ mkcommit A_4
338 $ mkcommit A_4
249 created new head
339 created new head
250 $ hg debugobsolete `getid A_2` `getid A_4`
340 $ hg debugobsolete `getid A_2` `getid A_4`
251 $ hg up 0
341 $ hg up 0
252 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
342 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
253 $ mkcommit A_5
343 $ mkcommit A_5
254 created new head
344 created new head
255 $ hg debugobsolete `getid A_4` `getid A_5`
345 $ hg debugobsolete `getid A_4` `getid A_5`
256 $ hg log -G --hidden
346 $ hg log -G --hidden
257 @ 6:e442cfc57690 A_5
347 @ 6:e442cfc57690 A_5
258 |
348 |
259 | x 5:6a411f0d7a0a A_4
349 | x 5:6a411f0d7a0a A_4
260 |/
350 |/
261 | o 4:01f36c5a8fda A_3
351 | o 4:01f36c5a8fda A_3
262 |/
352 |/
263 | x 3:392fd25390da A_2
353 | x 3:392fd25390da A_2
264 |/
354 |/
265 | x 2:82623d38b9ba A_1
355 | x 2:82623d38b9ba A_1
266 |/
356 |/
267 | x 1:007dc284c1f8 A_0
357 | x 1:007dc284c1f8 A_0
268 |/
358 |/
269 o 0:d20a80d4def3 base
359 o 0:d20a80d4def3 base
270
360
271 $ hg debugsuccessorssets --hidden 'all()'
361 $ hg debugsuccessorssets --hidden 'all()'
272 d20a80d4def3
362 d20a80d4def3
273 d20a80d4def3
363 d20a80d4def3
274 007dc284c1f8
364 007dc284c1f8
275 01f36c5a8fda e442cfc57690
365 01f36c5a8fda e442cfc57690
276 82623d38b9ba
366 82623d38b9ba
277 01f36c5a8fda
367 01f36c5a8fda
278 392fd25390da
368 392fd25390da
279 e442cfc57690
369 e442cfc57690
280 01f36c5a8fda
370 01f36c5a8fda
281 01f36c5a8fda
371 01f36c5a8fda
282 6a411f0d7a0a
372 6a411f0d7a0a
283 e442cfc57690
373 e442cfc57690
284 e442cfc57690
374 e442cfc57690
285 e442cfc57690
375 e442cfc57690
376 $ hg debugsuccessorssets 'all()' --closest
377 d20a80d4def3
378 d20a80d4def3
379 01f36c5a8fda
380 01f36c5a8fda
381 e442cfc57690
382 e442cfc57690
383 $ hg debugsuccessorssets 'all()' --closest --hidden
384 d20a80d4def3
385 d20a80d4def3
386 007dc284c1f8
387 82623d38b9ba 392fd25390da
388 82623d38b9ba
389 82623d38b9ba
390 392fd25390da
391 392fd25390da
392 01f36c5a8fda
393 01f36c5a8fda
394 6a411f0d7a0a
395 e442cfc57690
396 e442cfc57690
397 e442cfc57690
286 $ hg log -r 'divergent()'
398 $ hg log -r 'divergent()'
287
399
288 Check more complex obsolescence graft (with divergence)
400 Check more complex obsolescence graft (with divergence)
289
401
290 $ mkcommit B_0; hg up 0
402 $ mkcommit B_0; hg up 0
291 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
403 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
292 $ hg debugobsolete `getid B_0` `getid A_2`
404 $ hg debugobsolete `getid B_0` `getid A_2`
293 $ mkcommit A_7; hg up 0
405 $ mkcommit A_7; hg up 0
294 created new head
406 created new head
295 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
407 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
296 $ mkcommit A_8; hg up 0
408 $ mkcommit A_8; hg up 0
297 created new head
409 created new head
298 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
410 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
299 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
411 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
300 $ mkcommit A_9; hg up 0
412 $ mkcommit A_9; hg up 0
301 created new head
413 created new head
302 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
414 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
303 $ hg debugobsolete `getid A_5` `getid A_9`
415 $ hg debugobsolete `getid A_5` `getid A_9`
304 $ hg log -G --hidden
416 $ hg log -G --hidden
305 o 10:bed64f5d2f5a A_9
417 o 10:bed64f5d2f5a A_9
306 |
418 |
307 | o 9:14608b260df8 A_8
419 | o 9:14608b260df8 A_8
308 |/
420 |/
309 | o 8:7ae126973a96 A_7
421 | o 8:7ae126973a96 A_7
310 |/
422 |/
311 | x 7:3750ebee865d B_0
423 | x 7:3750ebee865d B_0
312 | |
424 | |
313 | x 6:e442cfc57690 A_5
425 | x 6:e442cfc57690 A_5
314 |/
426 |/
315 | x 5:6a411f0d7a0a A_4
427 | x 5:6a411f0d7a0a A_4
316 |/
428 |/
317 | o 4:01f36c5a8fda A_3
429 | o 4:01f36c5a8fda A_3
318 |/
430 |/
319 | x 3:392fd25390da A_2
431 | x 3:392fd25390da A_2
320 |/
432 |/
321 | x 2:82623d38b9ba A_1
433 | x 2:82623d38b9ba A_1
322 |/
434 |/
323 | x 1:007dc284c1f8 A_0
435 | x 1:007dc284c1f8 A_0
324 |/
436 |/
325 @ 0:d20a80d4def3 base
437 @ 0:d20a80d4def3 base
326
438
327 $ hg debugsuccessorssets --hidden 'all()'
439 $ hg debugsuccessorssets --hidden 'all()'
328 d20a80d4def3
440 d20a80d4def3
329 d20a80d4def3
441 d20a80d4def3
330 007dc284c1f8
442 007dc284c1f8
331 01f36c5a8fda bed64f5d2f5a
443 01f36c5a8fda bed64f5d2f5a
332 01f36c5a8fda 7ae126973a96 14608b260df8
444 01f36c5a8fda 7ae126973a96 14608b260df8
333 82623d38b9ba
445 82623d38b9ba
334 01f36c5a8fda
446 01f36c5a8fda
335 392fd25390da
447 392fd25390da
336 bed64f5d2f5a
448 bed64f5d2f5a
337 7ae126973a96 14608b260df8
449 7ae126973a96 14608b260df8
338 01f36c5a8fda
450 01f36c5a8fda
339 01f36c5a8fda
451 01f36c5a8fda
340 6a411f0d7a0a
452 6a411f0d7a0a
341 bed64f5d2f5a
453 bed64f5d2f5a
342 7ae126973a96 14608b260df8
454 7ae126973a96 14608b260df8
343 e442cfc57690
455 e442cfc57690
344 bed64f5d2f5a
456 bed64f5d2f5a
345 7ae126973a96 14608b260df8
457 7ae126973a96 14608b260df8
346 3750ebee865d
458 3750ebee865d
347 bed64f5d2f5a
459 bed64f5d2f5a
348 7ae126973a96 14608b260df8
460 7ae126973a96 14608b260df8
349 7ae126973a96
461 7ae126973a96
350 7ae126973a96
462 7ae126973a96
351 14608b260df8
463 14608b260df8
352 14608b260df8
464 14608b260df8
353 bed64f5d2f5a
465 bed64f5d2f5a
354 bed64f5d2f5a
466 bed64f5d2f5a
467 $ hg debugsuccessorssets 'all()' --closest
468 d20a80d4def3
469 d20a80d4def3
470 01f36c5a8fda
471 01f36c5a8fda
472 7ae126973a96
473 7ae126973a96
474 14608b260df8
475 14608b260df8
476 bed64f5d2f5a
477 bed64f5d2f5a
478 $ hg debugsuccessorssets 'all()' --closest --hidden
479 d20a80d4def3
480 d20a80d4def3
481 007dc284c1f8
482 82623d38b9ba 392fd25390da
483 82623d38b9ba
484 82623d38b9ba
485 392fd25390da
486 392fd25390da
487 01f36c5a8fda
488 01f36c5a8fda
489 6a411f0d7a0a
490 e442cfc57690
491 e442cfc57690
492 e442cfc57690
493 3750ebee865d
494 392fd25390da
495 7ae126973a96
496 7ae126973a96
497 14608b260df8
498 14608b260df8
499 bed64f5d2f5a
500 bed64f5d2f5a
355 $ hg log -r 'divergent()'
501 $ hg log -r 'divergent()'
356 4:01f36c5a8fda A_3
502 4:01f36c5a8fda A_3
357 8:7ae126973a96 A_7
503 8:7ae126973a96 A_7
358 9:14608b260df8 A_8
504 9:14608b260df8 A_8
359 10:bed64f5d2f5a A_9
505 10:bed64f5d2f5a A_9
360
506
361 fix the divergence
507 fix the divergence
362
508
363 $ mkcommit A_A; hg up 0
509 $ mkcommit A_A; hg up 0
364 created new head
510 created new head
365 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
511 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
366 $ hg debugobsolete `getid A_9` `getid A_A`
512 $ hg debugobsolete `getid A_9` `getid A_A`
367 $ hg debugobsolete `getid A_7` `getid A_A`
513 $ hg debugobsolete `getid A_7` `getid A_A`
368 $ hg debugobsolete `getid A_8` `getid A_A`
514 $ hg debugobsolete `getid A_8` `getid A_A`
369 $ hg log -G --hidden
515 $ hg log -G --hidden
370 o 11:a139f71be9da A_A
516 o 11:a139f71be9da A_A
371 |
517 |
372 | x 10:bed64f5d2f5a A_9
518 | x 10:bed64f5d2f5a A_9
373 |/
519 |/
374 | x 9:14608b260df8 A_8
520 | x 9:14608b260df8 A_8
375 |/
521 |/
376 | x 8:7ae126973a96 A_7
522 | x 8:7ae126973a96 A_7
377 |/
523 |/
378 | x 7:3750ebee865d B_0
524 | x 7:3750ebee865d B_0
379 | |
525 | |
380 | x 6:e442cfc57690 A_5
526 | x 6:e442cfc57690 A_5
381 |/
527 |/
382 | x 5:6a411f0d7a0a A_4
528 | x 5:6a411f0d7a0a A_4
383 |/
529 |/
384 | o 4:01f36c5a8fda A_3
530 | o 4:01f36c5a8fda A_3
385 |/
531 |/
386 | x 3:392fd25390da A_2
532 | x 3:392fd25390da A_2
387 |/
533 |/
388 | x 2:82623d38b9ba A_1
534 | x 2:82623d38b9ba A_1
389 |/
535 |/
390 | x 1:007dc284c1f8 A_0
536 | x 1:007dc284c1f8 A_0
391 |/
537 |/
392 @ 0:d20a80d4def3 base
538 @ 0:d20a80d4def3 base
393
539
394 $ hg debugsuccessorssets --hidden 'all()'
540 $ hg debugsuccessorssets --hidden 'all()'
395 d20a80d4def3
541 d20a80d4def3
396 d20a80d4def3
542 d20a80d4def3
397 007dc284c1f8
543 007dc284c1f8
398 01f36c5a8fda a139f71be9da
544 01f36c5a8fda a139f71be9da
399 82623d38b9ba
545 82623d38b9ba
400 01f36c5a8fda
546 01f36c5a8fda
401 392fd25390da
547 392fd25390da
402 a139f71be9da
548 a139f71be9da
403 01f36c5a8fda
549 01f36c5a8fda
404 01f36c5a8fda
550 01f36c5a8fda
405 6a411f0d7a0a
551 6a411f0d7a0a
406 a139f71be9da
552 a139f71be9da
407 e442cfc57690
553 e442cfc57690
408 a139f71be9da
554 a139f71be9da
409 3750ebee865d
555 3750ebee865d
410 a139f71be9da
556 a139f71be9da
411 7ae126973a96
557 7ae126973a96
412 a139f71be9da
558 a139f71be9da
413 14608b260df8
559 14608b260df8
414 a139f71be9da
560 a139f71be9da
415 bed64f5d2f5a
561 bed64f5d2f5a
416 a139f71be9da
562 a139f71be9da
417 a139f71be9da
563 a139f71be9da
418 a139f71be9da
564 a139f71be9da
565 $ hg debugsuccessorssets 'all()' --closest
566 d20a80d4def3
567 d20a80d4def3
568 01f36c5a8fda
569 01f36c5a8fda
570 a139f71be9da
571 a139f71be9da
572 $ hg debugsuccessorssets 'all()' --closest --hidden
573 d20a80d4def3
574 d20a80d4def3
575 007dc284c1f8
576 82623d38b9ba 392fd25390da
577 82623d38b9ba
578 82623d38b9ba
579 392fd25390da
580 392fd25390da
581 01f36c5a8fda
582 01f36c5a8fda
583 6a411f0d7a0a
584 e442cfc57690
585 e442cfc57690
586 e442cfc57690
587 3750ebee865d
588 392fd25390da
589 7ae126973a96
590 a139f71be9da
591 14608b260df8
592 a139f71be9da
593 bed64f5d2f5a
594 a139f71be9da
595 a139f71be9da
596 a139f71be9da
419 $ hg log -r 'divergent()'
597 $ hg log -r 'divergent()'
420
598
421 $ cd ..
599 $ cd ..
422
600
423
601
424 Subset does not diverge
602 Subset does not diverge
425 ------------------------------
603 ------------------------------
426
604
427 Do not report divergent successors-set if it is a subset of another
605 Do not report divergent successors-set if it is a subset of another
428 successors-set. (report [A,B] not [A] + [A,B])
606 successors-set. (report [A,B] not [A] + [A,B])
429
607
430 $ newcase subset
608 $ newcase subset
431 $ hg debugobsolete `getid A_0` `getid A_2`
609 $ hg debugobsolete `getid A_0` `getid A_2`
432 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
610 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
433 $ hg debugsuccessorssets --hidden 'desc('A_0')'
611 $ hg debugsuccessorssets --hidden 'desc('A_0')'
434 007dc284c1f8
612 007dc284c1f8
435 82623d38b9ba 392fd25390da
613 82623d38b9ba 392fd25390da
614 $ hg debugsuccessorssets 'desc('A_0')' --closest
615 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
616 007dc284c1f8
617 82623d38b9ba 392fd25390da
436
618
437 $ cd ..
619 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now