debugdiscovery: include the number of heads in all sets...
Author: marmoute
Changeset: r42321:607a0de9 (branch: default)
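This changeset extends the statistics reported by 'hg debugdiscovery': besides counting common heads that are also local heads or also remote heads, it adds a count of heads present in all three sets, stored as nb-common-both and computed as len(common & rheads & lheads) in the diff below. With that addition, the heads summary section of the output is expected to look roughly like the following (counts and exact column alignment are illustrative, not taken from a real run):

    heads summary:
      total common heads:          2
        also local heads:          1
       also remote heads:          1
                    both:          1
      local heads:                 3
        common:                    1
        missing:                   2
      remote heads:                2
        common:                    1
        unknown:                   1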
@@ -1,3467 +1,3469 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from . import (
35 from . import (
36 bundle2,
36 bundle2,
37 changegroup,
37 changegroup,
38 cmdutil,
38 cmdutil,
39 color,
39 color,
40 context,
40 context,
41 copies,
41 copies,
42 dagparser,
42 dagparser,
43 encoding,
43 encoding,
44 error,
44 error,
45 exchange,
45 exchange,
46 extensions,
46 extensions,
47 filemerge,
47 filemerge,
48 filesetlang,
48 filesetlang,
49 formatter,
49 formatter,
50 hg,
50 hg,
51 httppeer,
51 httppeer,
52 localrepo,
52 localrepo,
53 lock as lockmod,
53 lock as lockmod,
54 logcmdutil,
54 logcmdutil,
55 merge as mergemod,
55 merge as mergemod,
56 obsolete,
56 obsolete,
57 obsutil,
57 obsutil,
58 phases,
58 phases,
59 policy,
59 policy,
60 pvec,
60 pvec,
61 pycompat,
61 pycompat,
62 registrar,
62 registrar,
63 repair,
63 repair,
64 revlog,
64 revlog,
65 revset,
65 revset,
66 revsetlang,
66 revsetlang,
67 scmutil,
67 scmutil,
68 setdiscovery,
68 setdiscovery,
69 simplemerge,
69 simplemerge,
70 sshpeer,
70 sshpeer,
71 sslutil,
71 sslutil,
72 streamclone,
72 streamclone,
73 templater,
73 templater,
74 treediscovery,
74 treediscovery,
75 upgrade,
75 upgrade,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 vfs as vfsmod,
78 vfs as vfsmod,
79 wireprotoframing,
79 wireprotoframing,
80 wireprotoserver,
80 wireprotoserver,
81 wireprotov2peer,
81 wireprotov2peer,
82 )
82 )
83 from .utils import (
83 from .utils import (
84 cborutil,
84 cborutil,
85 compression,
85 compression,
86 dateutil,
86 dateutil,
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90
90
91 from .revlogutils import (
91 from .revlogutils import (
92 deltas as deltautil
92 deltas as deltautil
93 )
93 )
94
94
95 release = lockmod.release
95 release = lockmod.release
96
96
97 command = registrar.command()
97 command = registrar.command()
98
98
99 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
100 def debugancestor(ui, repo, *args):
100 def debugancestor(ui, repo, *args):
101 """find the ancestor revision of two revisions in a given index"""
101 """find the ancestor revision of two revisions in a given index"""
102 if len(args) == 3:
102 if len(args) == 3:
103 index, rev1, rev2 = args
103 index, rev1, rev2 = args
104 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
105 lookup = r.lookup
105 lookup = r.lookup
106 elif len(args) == 2:
106 elif len(args) == 2:
107 if not repo:
107 if not repo:
108 raise error.Abort(_('there is no Mercurial repository here '
108 raise error.Abort(_('there is no Mercurial repository here '
109 '(.hg not found)'))
109 '(.hg not found)'))
110 rev1, rev2 = args
110 rev1, rev2 = args
111 r = repo.changelog
111 r = repo.changelog
112 lookup = repo.lookup
112 lookup = repo.lookup
113 else:
113 else:
114 raise error.Abort(_('either two or three arguments required'))
114 raise error.Abort(_('either two or three arguments required'))
115 a = r.ancestor(lookup(rev1), lookup(rev2))
115 a = r.ancestor(lookup(rev1), lookup(rev2))
116 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
117
117
118 @command('debugapplystreamclonebundle', [], 'FILE')
118 @command('debugapplystreamclonebundle', [], 'FILE')
119 def debugapplystreamclonebundle(ui, repo, fname):
119 def debugapplystreamclonebundle(ui, repo, fname):
120 """apply a stream clone bundle file"""
120 """apply a stream clone bundle file"""
121 f = hg.openpath(ui, fname)
121 f = hg.openpath(ui, fname)
122 gen = exchange.readbundle(ui, f, fname)
122 gen = exchange.readbundle(ui, f, fname)
123 gen.apply(repo)
123 gen.apply(repo)
124
124
125 @command('debugbuilddag',
125 @command('debugbuilddag',
126 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
127 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
128 ('n', 'new-file', None, _('add new file at each rev'))],
128 ('n', 'new-file', None, _('add new file at each rev'))],
129 _('[OPTION]... [TEXT]'))
129 _('[OPTION]... [TEXT]'))
130 def debugbuilddag(ui, repo, text=None,
130 def debugbuilddag(ui, repo, text=None,
131 mergeable_file=False,
131 mergeable_file=False,
132 overwritten_file=False,
132 overwritten_file=False,
133 new_file=False):
133 new_file=False):
134 """builds a repo with a given DAG from scratch in the current empty repo
134 """builds a repo with a given DAG from scratch in the current empty repo
135
135
136 The description of the DAG is read from stdin if not given on the
136 The description of the DAG is read from stdin if not given on the
137 command line.
137 command line.
138
138
139 Elements:
139 Elements:
140
140
141 - "+n" is a linear run of n nodes based on the current default parent
141 - "+n" is a linear run of n nodes based on the current default parent
142 - "." is a single node based on the current default parent
142 - "." is a single node based on the current default parent
143 - "$" resets the default parent to null (implied at the start);
143 - "$" resets the default parent to null (implied at the start);
144 otherwise the default parent is always the last node created
144 otherwise the default parent is always the last node created
145 - "<p" sets the default parent to the backref p
145 - "<p" sets the default parent to the backref p
146 - "*p" is a fork at parent p, which is a backref
146 - "*p" is a fork at parent p, which is a backref
147 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
148 - "/p2" is a merge of the preceding node and p2
148 - "/p2" is a merge of the preceding node and p2
149 - ":tag" defines a local tag for the preceding node
149 - ":tag" defines a local tag for the preceding node
150 - "@branch" sets the named branch for subsequent nodes
150 - "@branch" sets the named branch for subsequent nodes
151 - "#...\\n" is a comment up to the end of the line
151 - "#...\\n" is a comment up to the end of the line
152
152
153 Whitespace between the above elements is ignored.
153 Whitespace between the above elements is ignored.
154
154
155 A backref is either
155 A backref is either
156
156
157 - a number n, which references the node curr-n, where curr is the current
157 - a number n, which references the node curr-n, where curr is the current
158 node, or
158 node, or
159 - the name of a local tag you placed earlier using ":tag", or
159 - the name of a local tag you placed earlier using ":tag", or
160 - empty to denote the default parent.
160 - empty to denote the default parent.
161
161
162 All string valued-elements are either strictly alphanumeric, or must
162 All string valued-elements are either strictly alphanumeric, or must
163 be enclosed in double quotes ("..."), with "\\" as escape character.
163 be enclosed in double quotes ("..."), with "\\" as escape character.
164 """
164 """
165
165
166 if text is None:
166 if text is None:
167 ui.status(_("reading DAG from stdin\n"))
167 ui.status(_("reading DAG from stdin\n"))
168 text = ui.fin.read()
168 text = ui.fin.read()
169
169
170 cl = repo.changelog
170 cl = repo.changelog
171 if len(cl) > 0:
171 if len(cl) > 0:
172 raise error.Abort(_('repository is not empty'))
172 raise error.Abort(_('repository is not empty'))
173
173
174 # determine number of revs in DAG
174 # determine number of revs in DAG
175 total = 0
175 total = 0
176 for type, data in dagparser.parsedag(text):
176 for type, data in dagparser.parsedag(text):
177 if type == 'n':
177 if type == 'n':
178 total += 1
178 total += 1
179
179
180 if mergeable_file:
180 if mergeable_file:
181 linesperrev = 2
181 linesperrev = 2
182 # make a file with k lines per rev
182 # make a file with k lines per rev
183 initialmergedlines = ['%d' % i
183 initialmergedlines = ['%d' % i
184 for i in pycompat.xrange(0, total * linesperrev)]
184 for i in pycompat.xrange(0, total * linesperrev)]
185 initialmergedlines.append("")
185 initialmergedlines.append("")
186
186
187 tags = []
187 tags = []
188 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 progress = ui.makeprogress(_('building'), unit=_('revisions'),
189 total=total)
189 total=total)
190 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
191 at = -1
191 at = -1
192 atbranch = 'default'
192 atbranch = 'default'
193 nodeids = []
193 nodeids = []
194 id = 0
194 id = 0
195 progress.update(id)
195 progress.update(id)
196 for type, data in dagparser.parsedag(text):
196 for type, data in dagparser.parsedag(text):
197 if type == 'n':
197 if type == 'n':
198 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 id, ps = data
199 id, ps = data
200
200
201 files = []
201 files = []
202 filecontent = {}
202 filecontent = {}
203
203
204 p2 = None
204 p2 = None
205 if mergeable_file:
205 if mergeable_file:
206 fn = "mf"
206 fn = "mf"
207 p1 = repo[ps[0]]
207 p1 = repo[ps[0]]
208 if len(ps) > 1:
208 if len(ps) > 1:
209 p2 = repo[ps[1]]
209 p2 = repo[ps[1]]
210 pa = p1.ancestor(p2)
210 pa = p1.ancestor(p2)
211 base, local, other = [x[fn].data() for x in (pa, p1,
211 base, local, other = [x[fn].data() for x in (pa, p1,
212 p2)]
212 p2)]
213 m3 = simplemerge.Merge3Text(base, local, other)
213 m3 = simplemerge.Merge3Text(base, local, other)
214 ml = [l.strip() for l in m3.merge_lines()]
214 ml = [l.strip() for l in m3.merge_lines()]
215 ml.append("")
215 ml.append("")
216 elif at > 0:
216 elif at > 0:
217 ml = p1[fn].data().split("\n")
217 ml = p1[fn].data().split("\n")
218 else:
218 else:
219 ml = initialmergedlines
219 ml = initialmergedlines
220 ml[id * linesperrev] += " r%i" % id
220 ml[id * linesperrev] += " r%i" % id
221 mergedtext = "\n".join(ml)
221 mergedtext = "\n".join(ml)
222 files.append(fn)
222 files.append(fn)
223 filecontent[fn] = mergedtext
223 filecontent[fn] = mergedtext
224
224
225 if overwritten_file:
225 if overwritten_file:
226 fn = "of"
226 fn = "of"
227 files.append(fn)
227 files.append(fn)
228 filecontent[fn] = "r%i\n" % id
228 filecontent[fn] = "r%i\n" % id
229
229
230 if new_file:
230 if new_file:
231 fn = "nf%i" % id
231 fn = "nf%i" % id
232 files.append(fn)
232 files.append(fn)
233 filecontent[fn] = "r%i\n" % id
233 filecontent[fn] = "r%i\n" % id
234 if len(ps) > 1:
234 if len(ps) > 1:
235 if not p2:
235 if not p2:
236 p2 = repo[ps[1]]
236 p2 = repo[ps[1]]
237 for fn in p2:
237 for fn in p2:
238 if fn.startswith("nf"):
238 if fn.startswith("nf"):
239 files.append(fn)
239 files.append(fn)
240 filecontent[fn] = p2[fn].data()
240 filecontent[fn] = p2[fn].data()
241
241
242 def fctxfn(repo, cx, path):
242 def fctxfn(repo, cx, path):
243 if path in filecontent:
243 if path in filecontent:
244 return context.memfilectx(repo, cx, path,
244 return context.memfilectx(repo, cx, path,
245 filecontent[path])
245 filecontent[path])
246 return None
246 return None
247
247
248 if len(ps) == 0 or ps[0] < 0:
248 if len(ps) == 0 or ps[0] < 0:
249 pars = [None, None]
249 pars = [None, None]
250 elif len(ps) == 1:
250 elif len(ps) == 1:
251 pars = [nodeids[ps[0]], None]
251 pars = [nodeids[ps[0]], None]
252 else:
252 else:
253 pars = [nodeids[p] for p in ps]
253 pars = [nodeids[p] for p in ps]
254 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 date=(id, 0),
255 date=(id, 0),
256 user="debugbuilddag",
256 user="debugbuilddag",
257 extra={'branch': atbranch})
257 extra={'branch': atbranch})
258 nodeid = repo.commitctx(cx)
258 nodeid = repo.commitctx(cx)
259 nodeids.append(nodeid)
259 nodeids.append(nodeid)
260 at = id
260 at = id
261 elif type == 'l':
261 elif type == 'l':
262 id, name = data
262 id, name = data
263 ui.note(('tag %s\n' % name))
263 ui.note(('tag %s\n' % name))
264 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 elif type == 'a':
265 elif type == 'a':
266 ui.note(('branch %s\n' % data))
266 ui.note(('branch %s\n' % data))
267 atbranch = data
267 atbranch = data
268 progress.update(id)
268 progress.update(id)
269
269
270 if tags:
270 if tags:
271 repo.vfs.write("localtags", "".join(tags))
271 repo.vfs.write("localtags", "".join(tags))
272
272
273 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
274 indent_string = ' ' * indent
274 indent_string = ' ' * indent
275 if all:
275 if all:
276 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
277 % indent_string)
277 % indent_string)
278
278
279 def showchunks(named):
279 def showchunks(named):
280 ui.write("\n%s%s\n" % (indent_string, named))
280 ui.write("\n%s%s\n" % (indent_string, named))
281 for deltadata in gen.deltaiter():
281 for deltadata in gen.deltaiter():
282 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 node, p1, p2, cs, deltabase, delta, flags = deltadata
283 ui.write("%s%s %s %s %s %s %d\n" %
283 ui.write("%s%s %s %s %s %s %d\n" %
284 (indent_string, hex(node), hex(p1), hex(p2),
284 (indent_string, hex(node), hex(p1), hex(p2),
285 hex(cs), hex(deltabase), len(delta)))
285 hex(cs), hex(deltabase), len(delta)))
286
286
287 chunkdata = gen.changelogheader()
287 chunkdata = gen.changelogheader()
288 showchunks("changelog")
288 showchunks("changelog")
289 chunkdata = gen.manifestheader()
289 chunkdata = gen.manifestheader()
290 showchunks("manifest")
290 showchunks("manifest")
291 for chunkdata in iter(gen.filelogheader, {}):
291 for chunkdata in iter(gen.filelogheader, {}):
292 fname = chunkdata['filename']
292 fname = chunkdata['filename']
293 showchunks(fname)
293 showchunks(fname)
294 else:
294 else:
295 if isinstance(gen, bundle2.unbundle20):
295 if isinstance(gen, bundle2.unbundle20):
296 raise error.Abort(_('use debugbundle2 for this file'))
296 raise error.Abort(_('use debugbundle2 for this file'))
297 chunkdata = gen.changelogheader()
297 chunkdata = gen.changelogheader()
298 for deltadata in gen.deltaiter():
298 for deltadata in gen.deltaiter():
299 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 node, p1, p2, cs, deltabase, delta, flags = deltadata
300 ui.write("%s%s\n" % (indent_string, hex(node)))
300 ui.write("%s%s\n" % (indent_string, hex(node)))
301
301
302 def _debugobsmarkers(ui, part, indent=0, **opts):
302 def _debugobsmarkers(ui, part, indent=0, **opts):
303 """display version and markers contained in 'data'"""
303 """display version and markers contained in 'data'"""
304 opts = pycompat.byteskwargs(opts)
304 opts = pycompat.byteskwargs(opts)
305 data = part.read()
305 data = part.read()
306 indent_string = ' ' * indent
306 indent_string = ' ' * indent
307 try:
307 try:
308 version, markers = obsolete._readmarkers(data)
308 version, markers = obsolete._readmarkers(data)
309 except error.UnknownVersion as exc:
309 except error.UnknownVersion as exc:
310 msg = "%sunsupported version: %s (%d bytes)\n"
310 msg = "%sunsupported version: %s (%d bytes)\n"
311 msg %= indent_string, exc.version, len(data)
311 msg %= indent_string, exc.version, len(data)
312 ui.write(msg)
312 ui.write(msg)
313 else:
313 else:
314 msg = "%sversion: %d (%d bytes)\n"
314 msg = "%sversion: %d (%d bytes)\n"
315 msg %= indent_string, version, len(data)
315 msg %= indent_string, version, len(data)
316 ui.write(msg)
316 ui.write(msg)
317 fm = ui.formatter('debugobsolete', opts)
317 fm = ui.formatter('debugobsolete', opts)
318 for rawmarker in sorted(markers):
318 for rawmarker in sorted(markers):
319 m = obsutil.marker(None, rawmarker)
319 m = obsutil.marker(None, rawmarker)
320 fm.startitem()
320 fm.startitem()
321 fm.plain(indent_string)
321 fm.plain(indent_string)
322 cmdutil.showmarker(fm, m)
322 cmdutil.showmarker(fm, m)
323 fm.end()
323 fm.end()
324
324
325 def _debugphaseheads(ui, data, indent=0):
325 def _debugphaseheads(ui, data, indent=0):
326 """display version and markers contained in 'data'"""
326 """display version and markers contained in 'data'"""
327 indent_string = ' ' * indent
327 indent_string = ' ' * indent
328 headsbyphase = phases.binarydecode(data)
328 headsbyphase = phases.binarydecode(data)
329 for phase in phases.allphases:
329 for phase in phases.allphases:
330 for head in headsbyphase[phase]:
330 for head in headsbyphase[phase]:
331 ui.write(indent_string)
331 ui.write(indent_string)
332 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
333
333
334 def _quasirepr(thing):
334 def _quasirepr(thing):
335 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
336 return '{%s}' % (
336 return '{%s}' % (
337 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
338 return pycompat.bytestr(repr(thing))
338 return pycompat.bytestr(repr(thing))
339
339
340 def _debugbundle2(ui, gen, all=None, **opts):
340 def _debugbundle2(ui, gen, all=None, **opts):
341 """lists the contents of a bundle2"""
341 """lists the contents of a bundle2"""
342 if not isinstance(gen, bundle2.unbundle20):
342 if not isinstance(gen, bundle2.unbundle20):
343 raise error.Abort(_('not a bundle2 file'))
343 raise error.Abort(_('not a bundle2 file'))
344 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
345 parttypes = opts.get(r'part_type', [])
345 parttypes = opts.get(r'part_type', [])
346 for part in gen.iterparts():
346 for part in gen.iterparts():
347 if parttypes and part.type not in parttypes:
347 if parttypes and part.type not in parttypes:
348 continue
348 continue
349 msg = '%s -- %s (mandatory: %r)\n'
349 msg = '%s -- %s (mandatory: %r)\n'
350 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
351 if part.type == 'changegroup':
351 if part.type == 'changegroup':
352 version = part.params.get('version', '01')
352 version = part.params.get('version', '01')
353 cg = changegroup.getunbundler(version, part, 'UN')
353 cg = changegroup.getunbundler(version, part, 'UN')
354 if not ui.quiet:
354 if not ui.quiet:
355 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
356 if part.type == 'obsmarkers':
356 if part.type == 'obsmarkers':
357 if not ui.quiet:
357 if not ui.quiet:
358 _debugobsmarkers(ui, part, indent=4, **opts)
358 _debugobsmarkers(ui, part, indent=4, **opts)
359 if part.type == 'phase-heads':
359 if part.type == 'phase-heads':
360 if not ui.quiet:
360 if not ui.quiet:
361 _debugphaseheads(ui, part, indent=4)
361 _debugphaseheads(ui, part, indent=4)
362
362
363 @command('debugbundle',
363 @command('debugbundle',
364 [('a', 'all', None, _('show all details')),
364 [('a', 'all', None, _('show all details')),
365 ('', 'part-type', [], _('show only the named part type')),
365 ('', 'part-type', [], _('show only the named part type')),
366 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 ('', 'spec', None, _('print the bundlespec of the bundle'))],
367 _('FILE'),
367 _('FILE'),
368 norepo=True)
368 norepo=True)
369 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
370 """lists the contents of a bundle"""
370 """lists the contents of a bundle"""
371 with hg.openpath(ui, bundlepath) as f:
371 with hg.openpath(ui, bundlepath) as f:
372 if spec:
372 if spec:
373 spec = exchange.getbundlespec(ui, f)
373 spec = exchange.getbundlespec(ui, f)
374 ui.write('%s\n' % spec)
374 ui.write('%s\n' % spec)
375 return
375 return
376
376
377 gen = exchange.readbundle(ui, f, bundlepath)
377 gen = exchange.readbundle(ui, f, bundlepath)
378 if isinstance(gen, bundle2.unbundle20):
378 if isinstance(gen, bundle2.unbundle20):
379 return _debugbundle2(ui, gen, all=all, **opts)
379 return _debugbundle2(ui, gen, all=all, **opts)
380 _debugchangegroup(ui, gen, all=all, **opts)
380 _debugchangegroup(ui, gen, all=all, **opts)
381
381
382 @command('debugcapabilities',
382 @command('debugcapabilities',
383 [], _('PATH'),
383 [], _('PATH'),
384 norepo=True)
384 norepo=True)
385 def debugcapabilities(ui, path, **opts):
385 def debugcapabilities(ui, path, **opts):
386 """lists the capabilities of a remote peer"""
386 """lists the capabilities of a remote peer"""
387 opts = pycompat.byteskwargs(opts)
387 opts = pycompat.byteskwargs(opts)
388 peer = hg.peer(ui, opts, path)
388 peer = hg.peer(ui, opts, path)
389 caps = peer.capabilities()
389 caps = peer.capabilities()
390 ui.write(('Main capabilities:\n'))
390 ui.write(('Main capabilities:\n'))
391 for c in sorted(caps):
391 for c in sorted(caps):
392 ui.write((' %s\n') % c)
392 ui.write((' %s\n') % c)
393 b2caps = bundle2.bundle2caps(peer)
393 b2caps = bundle2.bundle2caps(peer)
394 if b2caps:
394 if b2caps:
395 ui.write(('Bundle2 capabilities:\n'))
395 ui.write(('Bundle2 capabilities:\n'))
396 for key, values in sorted(b2caps.iteritems()):
396 for key, values in sorted(b2caps.iteritems()):
397 ui.write((' %s\n') % key)
397 ui.write((' %s\n') % key)
398 for v in values:
398 for v in values:
399 ui.write((' %s\n') % v)
399 ui.write((' %s\n') % v)
400
400
401 @command('debugcheckstate', [], '')
401 @command('debugcheckstate', [], '')
402 def debugcheckstate(ui, repo):
402 def debugcheckstate(ui, repo):
403 """validate the correctness of the current dirstate"""
403 """validate the correctness of the current dirstate"""
404 parent1, parent2 = repo.dirstate.parents()
404 parent1, parent2 = repo.dirstate.parents()
405 m1 = repo[parent1].manifest()
405 m1 = repo[parent1].manifest()
406 m2 = repo[parent2].manifest()
406 m2 = repo[parent2].manifest()
407 errors = 0
407 errors = 0
408 for f in repo.dirstate:
408 for f in repo.dirstate:
409 state = repo.dirstate[f]
409 state = repo.dirstate[f]
410 if state in "nr" and f not in m1:
410 if state in "nr" and f not in m1:
411 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
412 errors += 1
412 errors += 1
413 if state in "a" and f in m1:
413 if state in "a" and f in m1:
414 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
415 errors += 1
415 errors += 1
416 if state in "m" and f not in m1 and f not in m2:
416 if state in "m" and f not in m1 and f not in m2:
417 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 ui.warn(_("%s in state %s, but not in either manifest\n") %
418 (f, state))
418 (f, state))
419 errors += 1
419 errors += 1
420 for f in m1:
420 for f in m1:
421 state = repo.dirstate[f]
421 state = repo.dirstate[f]
422 if state not in "nrm":
422 if state not in "nrm":
423 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
424 errors += 1
424 errors += 1
425 if errors:
425 if errors:
426 error = _(".hg/dirstate inconsistent with current parent's manifest")
426 error = _(".hg/dirstate inconsistent with current parent's manifest")
427 raise error.Abort(error)
427 raise error.Abort(error)
428
428
429 @command('debugcolor',
429 @command('debugcolor',
430 [('', 'style', None, _('show all configured styles'))],
430 [('', 'style', None, _('show all configured styles'))],
431 'hg debugcolor')
431 'hg debugcolor')
432 def debugcolor(ui, repo, **opts):
432 def debugcolor(ui, repo, **opts):
433 """show available color, effects or style"""
433 """show available color, effects or style"""
434 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
435 if opts.get(r'style'):
435 if opts.get(r'style'):
436 return _debugdisplaystyle(ui)
436 return _debugdisplaystyle(ui)
437 else:
437 else:
438 return _debugdisplaycolor(ui)
438 return _debugdisplaycolor(ui)
439
439
440 def _debugdisplaycolor(ui):
440 def _debugdisplaycolor(ui):
441 ui = ui.copy()
441 ui = ui.copy()
442 ui._styles.clear()
442 ui._styles.clear()
443 for effect in color._activeeffects(ui).keys():
443 for effect in color._activeeffects(ui).keys():
444 ui._styles[effect] = effect
444 ui._styles[effect] = effect
445 if ui._terminfoparams:
445 if ui._terminfoparams:
446 for k, v in ui.configitems('color'):
446 for k, v in ui.configitems('color'):
447 if k.startswith('color.'):
447 if k.startswith('color.'):
448 ui._styles[k] = k[6:]
448 ui._styles[k] = k[6:]
449 elif k.startswith('terminfo.'):
449 elif k.startswith('terminfo.'):
450 ui._styles[k] = k[9:]
450 ui._styles[k] = k[9:]
451 ui.write(_('available colors:\n'))
451 ui.write(_('available colors:\n'))
452 # sort label with a '_' after the other to group '_background' entry.
452 # sort label with a '_' after the other to group '_background' entry.
453 items = sorted(ui._styles.items(),
453 items = sorted(ui._styles.items(),
454 key=lambda i: ('_' in i[0], i[0], i[1]))
454 key=lambda i: ('_' in i[0], i[0], i[1]))
455 for colorname, label in items:
455 for colorname, label in items:
456 ui.write(('%s\n') % colorname, label=label)
456 ui.write(('%s\n') % colorname, label=label)
457
457
458 def _debugdisplaystyle(ui):
458 def _debugdisplaystyle(ui):
459 ui.write(_('available style:\n'))
459 ui.write(_('available style:\n'))
460 if not ui._styles:
460 if not ui._styles:
461 return
461 return
462 width = max(len(s) for s in ui._styles)
462 width = max(len(s) for s in ui._styles)
463 for label, effects in sorted(ui._styles.items()):
463 for label, effects in sorted(ui._styles.items()):
464 ui.write('%s' % label, label=label)
464 ui.write('%s' % label, label=label)
465 if effects:
465 if effects:
466 # 50
466 # 50
467 ui.write(': ')
467 ui.write(': ')
468 ui.write(' ' * (max(0, width - len(label))))
468 ui.write(' ' * (max(0, width - len(label))))
469 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
470 ui.write('\n')
470 ui.write('\n')
471
471
472 @command('debugcreatestreamclonebundle', [], 'FILE')
472 @command('debugcreatestreamclonebundle', [], 'FILE')
473 def debugcreatestreamclonebundle(ui, repo, fname):
473 def debugcreatestreamclonebundle(ui, repo, fname):
474 """create a stream clone bundle file
474 """create a stream clone bundle file
475
475
476 Stream bundles are special bundles that are essentially archives of
476 Stream bundles are special bundles that are essentially archives of
477 revlog files. They are commonly used for cloning very quickly.
477 revlog files. They are commonly used for cloning very quickly.
478 """
478 """
479 # TODO we may want to turn this into an abort when this functionality
479 # TODO we may want to turn this into an abort when this functionality
480 # is moved into `hg bundle`.
480 # is moved into `hg bundle`.
481 if phases.hassecret(repo):
481 if phases.hassecret(repo):
482 ui.warn(_('(warning: stream clone bundle will contain secret '
482 ui.warn(_('(warning: stream clone bundle will contain secret '
483 'revisions)\n'))
483 'revisions)\n'))
484
484
485 requirements, gen = streamclone.generatebundlev1(repo)
485 requirements, gen = streamclone.generatebundlev1(repo)
486 changegroup.writechunks(ui, gen, fname)
486 changegroup.writechunks(ui, gen, fname)
487
487
488 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
489
489
490 @command('debugdag',
490 @command('debugdag',
491 [('t', 'tags', None, _('use tags as labels')),
491 [('t', 'tags', None, _('use tags as labels')),
492 ('b', 'branches', None, _('annotate with branch names')),
492 ('b', 'branches', None, _('annotate with branch names')),
493 ('', 'dots', None, _('use dots for runs')),
493 ('', 'dots', None, _('use dots for runs')),
494 ('s', 'spaces', None, _('separate elements by spaces'))],
494 ('s', 'spaces', None, _('separate elements by spaces'))],
495 _('[OPTION]... [FILE [REV]...]'),
495 _('[OPTION]... [FILE [REV]...]'),
496 optionalrepo=True)
496 optionalrepo=True)
497 def debugdag(ui, repo, file_=None, *revs, **opts):
497 def debugdag(ui, repo, file_=None, *revs, **opts):
498 """format the changelog or an index DAG as a concise textual description
498 """format the changelog or an index DAG as a concise textual description
499
499
500 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 If you pass a revlog index, the revlog's DAG is emitted. If you list
501 revision numbers, they get labeled in the output as rN.
501 revision numbers, they get labeled in the output as rN.
502
502
503 Otherwise, the changelog DAG of the current repo is emitted.
503 Otherwise, the changelog DAG of the current repo is emitted.
504 """
504 """
505 spaces = opts.get(r'spaces')
505 spaces = opts.get(r'spaces')
506 dots = opts.get(r'dots')
506 dots = opts.get(r'dots')
507 if file_:
507 if file_:
508 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
509 file_)
509 file_)
510 revs = set((int(r) for r in revs))
510 revs = set((int(r) for r in revs))
511 def events():
511 def events():
512 for r in rlog:
512 for r in rlog:
513 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 yield 'n', (r, list(p for p in rlog.parentrevs(r)
514 if p != -1))
514 if p != -1))
515 if r in revs:
515 if r in revs:
516 yield 'l', (r, "r%i" % r)
516 yield 'l', (r, "r%i" % r)
517 elif repo:
517 elif repo:
518 cl = repo.changelog
518 cl = repo.changelog
519 tags = opts.get(r'tags')
519 tags = opts.get(r'tags')
520 branches = opts.get(r'branches')
520 branches = opts.get(r'branches')
521 if tags:
521 if tags:
522 labels = {}
522 labels = {}
523 for l, n in repo.tags().items():
523 for l, n in repo.tags().items():
524 labels.setdefault(cl.rev(n), []).append(l)
524 labels.setdefault(cl.rev(n), []).append(l)
525 def events():
525 def events():
526 b = "default"
526 b = "default"
527 for r in cl:
527 for r in cl:
528 if branches:
528 if branches:
529 newb = cl.read(cl.node(r))[5]['branch']
529 newb = cl.read(cl.node(r))[5]['branch']
530 if newb != b:
530 if newb != b:
531 yield 'a', newb
531 yield 'a', newb
532 b = newb
532 b = newb
533 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 yield 'n', (r, list(p for p in cl.parentrevs(r)
534 if p != -1))
534 if p != -1))
535 if tags:
535 if tags:
536 ls = labels.get(r)
536 ls = labels.get(r)
537 if ls:
537 if ls:
538 for l in ls:
538 for l in ls:
539 yield 'l', (r, l)
539 yield 'l', (r, l)
540 else:
540 else:
541 raise error.Abort(_('need repo for changelog dag'))
541 raise error.Abort(_('need repo for changelog dag'))
542
542
543 for line in dagparser.dagtextlines(events(),
543 for line in dagparser.dagtextlines(events(),
544 addspaces=spaces,
544 addspaces=spaces,
545 wraplabels=True,
545 wraplabels=True,
546 wrapannotations=True,
546 wrapannotations=True,
547 wrapnonlinear=dots,
547 wrapnonlinear=dots,
548 usedots=dots,
548 usedots=dots,
549 maxlinewidth=70):
549 maxlinewidth=70):
550 ui.write(line)
550 ui.write(line)
551 ui.write("\n")
551 ui.write("\n")
552
552
553 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
554 def debugdata(ui, repo, file_, rev=None, **opts):
554 def debugdata(ui, repo, file_, rev=None, **opts):
555 """dump the contents of a data file revision"""
555 """dump the contents of a data file revision"""
556 opts = pycompat.byteskwargs(opts)
556 opts = pycompat.byteskwargs(opts)
557 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
558 if rev is not None:
558 if rev is not None:
559 raise error.CommandError('debugdata', _('invalid arguments'))
559 raise error.CommandError('debugdata', _('invalid arguments'))
560 file_, rev = None, file_
560 file_, rev = None, file_
561 elif rev is None:
561 elif rev is None:
562 raise error.CommandError('debugdata', _('invalid arguments'))
562 raise error.CommandError('debugdata', _('invalid arguments'))
563 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
564 try:
564 try:
565 ui.write(r.revision(r.lookup(rev), raw=True))
565 ui.write(r.revision(r.lookup(rev), raw=True))
566 except KeyError:
566 except KeyError:
567 raise error.Abort(_('invalid revision identifier %s') % rev)
567 raise error.Abort(_('invalid revision identifier %s') % rev)
568
568
569 @command('debugdate',
569 @command('debugdate',
570 [('e', 'extended', None, _('try extended date formats'))],
570 [('e', 'extended', None, _('try extended date formats'))],
571 _('[-e] DATE [RANGE]'),
571 _('[-e] DATE [RANGE]'),
572 norepo=True, optionalrepo=True)
572 norepo=True, optionalrepo=True)
573 def debugdate(ui, date, range=None, **opts):
573 def debugdate(ui, date, range=None, **opts):
574 """parse and display a date"""
574 """parse and display a date"""
575 if opts[r"extended"]:
575 if opts[r"extended"]:
576 d = dateutil.parsedate(date, util.extendeddateformats)
576 d = dateutil.parsedate(date, util.extendeddateformats)
577 else:
577 else:
578 d = dateutil.parsedate(date)
578 d = dateutil.parsedate(date)
579 ui.write(("internal: %d %d\n") % d)
579 ui.write(("internal: %d %d\n") % d)
580 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 ui.write(("standard: %s\n") % dateutil.datestr(d))
581 if range:
581 if range:
582 m = dateutil.matchdate(range)
582 m = dateutil.matchdate(range)
583 ui.write(("match: %s\n") % m(d[0]))
583 ui.write(("match: %s\n") % m(d[0]))
584
584
585 @command('debugdeltachain',
585 @command('debugdeltachain',
586 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 cmdutil.debugrevlogopts + cmdutil.formatteropts,
587 _('-c|-m|FILE'),
587 _('-c|-m|FILE'),
588 optionalrepo=True)
588 optionalrepo=True)
589 def debugdeltachain(ui, repo, file_=None, **opts):
589 def debugdeltachain(ui, repo, file_=None, **opts):
590 """dump information about delta chains in a revlog
590 """dump information about delta chains in a revlog
591
591
592 Output can be templatized. Available template keywords are:
592 Output can be templatized. Available template keywords are:
593
593
594 :``rev``: revision number
594 :``rev``: revision number
595 :``chainid``: delta chain identifier (numbered by unique base)
595 :``chainid``: delta chain identifier (numbered by unique base)
596 :``chainlen``: delta chain length to this revision
596 :``chainlen``: delta chain length to this revision
597 :``prevrev``: previous revision in delta chain
597 :``prevrev``: previous revision in delta chain
598 :``deltatype``: role of delta / how it was computed
598 :``deltatype``: role of delta / how it was computed
599 :``compsize``: compressed size of revision
599 :``compsize``: compressed size of revision
600 :``uncompsize``: uncompressed size of revision
600 :``uncompsize``: uncompressed size of revision
601 :``chainsize``: total size of compressed revisions in chain
601 :``chainsize``: total size of compressed revisions in chain
602 :``chainratio``: total chain size divided by uncompressed revision size
602 :``chainratio``: total chain size divided by uncompressed revision size
603 (new delta chains typically start at ratio 2.00)
603 (new delta chains typically start at ratio 2.00)
604 :``lindist``: linear distance from base revision in delta chain to end
604 :``lindist``: linear distance from base revision in delta chain to end
605 of this revision
605 of this revision
606 :``extradist``: total size of revisions not part of this delta chain from
606 :``extradist``: total size of revisions not part of this delta chain from
607 base of delta chain to end of this revision; a measurement
607 base of delta chain to end of this revision; a measurement
608 of how much extra data we need to read/seek across to read
608 of how much extra data we need to read/seek across to read
609 the delta chain for this revision
609 the delta chain for this revision
610 :``extraratio``: extradist divided by chainsize; another representation of
610 :``extraratio``: extradist divided by chainsize; another representation of
611 how much unrelated data is needed to load this delta chain
611 how much unrelated data is needed to load this delta chain
612
612
613 If the repository is configured to use the sparse read, additional keywords
613 If the repository is configured to use the sparse read, additional keywords
614 are available:
614 are available:
615
615
616 :``readsize``: total size of data read from the disk for a revision
616 :``readsize``: total size of data read from the disk for a revision
617 (sum of the sizes of all the blocks)
617 (sum of the sizes of all the blocks)
618 :``largestblock``: size of the largest block of data read from the disk
618 :``largestblock``: size of the largest block of data read from the disk
619 :``readdensity``: density of useful bytes in the data read from the disk
619 :``readdensity``: density of useful bytes in the data read from the disk
620 :``srchunks``: in how many data hunks the whole revision would be read
620 :``srchunks``: in how many data hunks the whole revision would be read
621
621
622 The sparse read can be enabled with experimental.sparse-read = True
622 The sparse read can be enabled with experimental.sparse-read = True
623 """
623 """
624 opts = pycompat.byteskwargs(opts)
624 opts = pycompat.byteskwargs(opts)
625 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
626 index = r.index
626 index = r.index
627 start = r.start
627 start = r.start
628 length = r.length
628 length = r.length
629 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 generaldelta = r.version & revlog.FLAG_GENERALDELTA
630 withsparseread = getattr(r, '_withsparseread', False)
630 withsparseread = getattr(r, '_withsparseread', False)
631
631
632 def revinfo(rev):
632 def revinfo(rev):
633 e = index[rev]
633 e = index[rev]
634 compsize = e[1]
634 compsize = e[1]
635 uncompsize = e[2]
635 uncompsize = e[2]
636 chainsize = 0
636 chainsize = 0
637
637
638 if generaldelta:
638 if generaldelta:
639 if e[3] == e[5]:
639 if e[3] == e[5]:
640 deltatype = 'p1'
640 deltatype = 'p1'
641 elif e[3] == e[6]:
641 elif e[3] == e[6]:
642 deltatype = 'p2'
642 deltatype = 'p2'
643 elif e[3] == rev - 1:
643 elif e[3] == rev - 1:
644 deltatype = 'prev'
644 deltatype = 'prev'
645 elif e[3] == rev:
645 elif e[3] == rev:
646 deltatype = 'base'
646 deltatype = 'base'
647 else:
647 else:
648 deltatype = 'other'
648 deltatype = 'other'
649 else:
649 else:
650 if e[3] == rev:
650 if e[3] == rev:
651 deltatype = 'base'
651 deltatype = 'base'
652 else:
652 else:
653 deltatype = 'prev'
653 deltatype = 'prev'
654
654
655 chain = r._deltachain(rev)[0]
655 chain = r._deltachain(rev)[0]
656 for iterrev in chain:
656 for iterrev in chain:
657 e = index[iterrev]
657 e = index[iterrev]
658 chainsize += e[1]
658 chainsize += e[1]
659
659
660 return compsize, uncompsize, deltatype, chain, chainsize
660 return compsize, uncompsize, deltatype, chain, chainsize
661
661
662 fm = ui.formatter('debugdeltachain', opts)
662 fm = ui.formatter('debugdeltachain', opts)
663
663
664 fm.plain(' rev chain# chainlen prev delta '
664 fm.plain(' rev chain# chainlen prev delta '
665 'size rawsize chainsize ratio lindist extradist '
665 'size rawsize chainsize ratio lindist extradist '
666 'extraratio')
666 'extraratio')
667 if withsparseread:
667 if withsparseread:
668 fm.plain(' readsize largestblk rddensity srchunks')
668 fm.plain(' readsize largestblk rddensity srchunks')
669 fm.plain('\n')
669 fm.plain('\n')
670
670
671 chainbases = {}
671 chainbases = {}
672 for rev in r:
672 for rev in r:
673 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
674 chainbase = chain[0]
674 chainbase = chain[0]
675 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
676 basestart = start(chainbase)
676 basestart = start(chainbase)
677 revstart = start(rev)
677 revstart = start(rev)
678 lineardist = revstart + comp - basestart
678 lineardist = revstart + comp - basestart
679 extradist = lineardist - chainsize
679 extradist = lineardist - chainsize
680 try:
680 try:
681 prevrev = chain[-2]
681 prevrev = chain[-2]
682 except IndexError:
682 except IndexError:
683 prevrev = -1
683 prevrev = -1
684
684
685 if uncomp != 0:
685 if uncomp != 0:
686 chainratio = float(chainsize) / float(uncomp)
686 chainratio = float(chainsize) / float(uncomp)
687 else:
687 else:
688 chainratio = chainsize
688 chainratio = chainsize
689
689
690 if chainsize != 0:
690 if chainsize != 0:
691 extraratio = float(extradist) / float(chainsize)
691 extraratio = float(extradist) / float(chainsize)
692 else:
692 else:
693 extraratio = extradist
693 extraratio = extradist
694
694
695 fm.startitem()
695 fm.startitem()
696 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 fm.write('rev chainid chainlen prevrev deltatype compsize '
697 'uncompsize chainsize chainratio lindist extradist '
697 'uncompsize chainsize chainratio lindist extradist '
698 'extraratio',
698 'extraratio',
699 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
700 rev, chainid, len(chain), prevrev, deltatype, comp,
700 rev, chainid, len(chain), prevrev, deltatype, comp,
701 uncomp, chainsize, chainratio, lineardist, extradist,
701 uncomp, chainsize, chainratio, lineardist, extradist,
702 extraratio,
702 extraratio,
703 rev=rev, chainid=chainid, chainlen=len(chain),
703 rev=rev, chainid=chainid, chainlen=len(chain),
704 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 prevrev=prevrev, deltatype=deltatype, compsize=comp,
705 uncompsize=uncomp, chainsize=chainsize,
705 uncompsize=uncomp, chainsize=chainsize,
706 chainratio=chainratio, lindist=lineardist,
706 chainratio=chainratio, lindist=lineardist,
707 extradist=extradist, extraratio=extraratio)
707 extradist=extradist, extraratio=extraratio)
708 if withsparseread:
708 if withsparseread:
709 readsize = 0
709 readsize = 0
710 largestblock = 0
710 largestblock = 0
711 srchunks = 0
711 srchunks = 0
712
712
713 for revschunk in deltautil.slicechunk(r, chain):
713 for revschunk in deltautil.slicechunk(r, chain):
714 srchunks += 1
714 srchunks += 1
715 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 blkend = start(revschunk[-1]) + length(revschunk[-1])
716 blksize = blkend - start(revschunk[0])
716 blksize = blkend - start(revschunk[0])
717
717
718 readsize += blksize
718 readsize += blksize
719 if largestblock < blksize:
719 if largestblock < blksize:
720 largestblock = blksize
720 largestblock = blksize
721
721
722 if readsize:
722 if readsize:
723 readdensity = float(chainsize) / float(readsize)
723 readdensity = float(chainsize) / float(readsize)
724 else:
724 else:
725 readdensity = 1
725 readdensity = 1
726
726
727 fm.write('readsize largestblock readdensity srchunks',
727 fm.write('readsize largestblock readdensity srchunks',
728 ' %10d %10d %9.5f %8d',
728 ' %10d %10d %9.5f %8d',
729 readsize, largestblock, readdensity, srchunks,
729 readsize, largestblock, readdensity, srchunks,
730 readsize=readsize, largestblock=largestblock,
730 readsize=readsize, largestblock=largestblock,
731 readdensity=readdensity, srchunks=srchunks)
731 readdensity=readdensity, srchunks=srchunks)
732
732
733 fm.plain('\n')
733 fm.plain('\n')
734
734
735 fm.end()
735 fm.end()
736
736
737 @command('debugdirstate|debugstate',
737 @command('debugdirstate|debugstate',
738 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
739 ('', 'dates', True, _('display the saved mtime')),
739 ('', 'dates', True, _('display the saved mtime')),
740 ('', 'datesort', None, _('sort by saved mtime'))],
740 ('', 'datesort', None, _('sort by saved mtime'))],
741 _('[OPTION]...'))
741 _('[OPTION]...'))
742 def debugstate(ui, repo, **opts):
742 def debugstate(ui, repo, **opts):
743 """show the contents of the current dirstate"""
743 """show the contents of the current dirstate"""
744
744
745 nodates = not opts[r'dates']
745 nodates = not opts[r'dates']
746 if opts.get(r'nodates') is not None:
746 if opts.get(r'nodates') is not None:
747 nodates = True
747 nodates = True
748 datesort = opts.get(r'datesort')
748 datesort = opts.get(r'datesort')
749
749
750 if datesort:
750 if datesort:
751 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
752 else:
752 else:
753 keyfunc = None # sort by filename
753 keyfunc = None # sort by filename
754 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
755 if ent[3] == -1:
755 if ent[3] == -1:
756 timestr = 'unset '
756 timestr = 'unset '
757 elif nodates:
757 elif nodates:
758 timestr = 'set '
758 timestr = 'set '
759 else:
759 else:
760 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
761 time.localtime(ent[3]))
761 time.localtime(ent[3]))
762 timestr = encoding.strtolocal(timestr)
762 timestr = encoding.strtolocal(timestr)
763 if ent[1] & 0o20000:
763 if ent[1] & 0o20000:
764 mode = 'lnk'
764 mode = 'lnk'
765 else:
765 else:
766 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
767 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
768 for f in repo.dirstate.copies():
768 for f in repo.dirstate.copies():
769 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
770
770
771 @command('debugdiscovery',
771 @command('debugdiscovery',
772 [('', 'old', None, _('use old-style discovery')),
772 [('', 'old', None, _('use old-style discovery')),
773 ('', 'nonheads', None,
773 ('', 'nonheads', None,
774 _('use old-style discovery with non-heads included')),
774 _('use old-style discovery with non-heads included')),
775 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 ('', 'rev', [], 'restrict discovery to this set of revs'),
776 ('', 'seed', '12323', 'specify the random seed use for discovery'),
776 ('', 'seed', '12323', 'specify the random seed use for discovery'),
777 ] + cmdutil.remoteopts,
777 ] + cmdutil.remoteopts,
778 _('[--rev REV] [OTHER]'))
778 _('[--rev REV] [OTHER]'))
779 def debugdiscovery(ui, repo, remoteurl="default", **opts):
779 def debugdiscovery(ui, repo, remoteurl="default", **opts):
780 """runs the changeset discovery protocol in isolation"""
780 """runs the changeset discovery protocol in isolation"""
781 opts = pycompat.byteskwargs(opts)
781 opts = pycompat.byteskwargs(opts)
782 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
782 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
783 remote = hg.peer(repo, opts, remoteurl)
783 remote = hg.peer(repo, opts, remoteurl)
784 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
784 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
785
785
786 # make sure tests are repeatable
786 # make sure tests are repeatable
787 random.seed(int(opts['seed']))
787 random.seed(int(opts['seed']))
788
788
789
789
790
790
791 if opts.get('old'):
791 if opts.get('old'):
792 def doit(pushedrevs, remoteheads, remote=remote):
792 def doit(pushedrevs, remoteheads, remote=remote):
793 if not util.safehasattr(remote, 'branches'):
793 if not util.safehasattr(remote, 'branches'):
794 # enable in-client legacy support
794 # enable in-client legacy support
795 remote = localrepo.locallegacypeer(remote.local())
795 remote = localrepo.locallegacypeer(remote.local())
796 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
796 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
797 force=True)
797 force=True)
798 common = set(common)
798 common = set(common)
799 if not opts.get('nonheads'):
799 if not opts.get('nonheads'):
800 ui.write(("unpruned common: %s\n") %
800 ui.write(("unpruned common: %s\n") %
801 " ".join(sorted(short(n) for n in common)))
801 " ".join(sorted(short(n) for n in common)))
802
802
803 clnode = repo.changelog.node
803 clnode = repo.changelog.node
804 common = repo.revs('heads(::%ln)', common)
804 common = repo.revs('heads(::%ln)', common)
805 common = {clnode(r) for r in common}
805 common = {clnode(r) for r in common}
806 return common, hds
806 return common, hds
807 else:
807 else:
808 def doit(pushedrevs, remoteheads, remote=remote):
808 def doit(pushedrevs, remoteheads, remote=remote):
809 nodes = None
809 nodes = None
810 if pushedrevs:
810 if pushedrevs:
811 revs = scmutil.revrange(repo, pushedrevs)
811 revs = scmutil.revrange(repo, pushedrevs)
812 nodes = [repo[r].node() for r in revs]
812 nodes = [repo[r].node() for r in revs]
813 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
813 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
814 ancestorsof=nodes)
814 ancestorsof=nodes)
815 return common, hds
815 return common, hds
816
816
817 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
817 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
818 localrevs = opts['rev']
818 localrevs = opts['rev']
819 with util.timedcm('debug-discovery') as t:
819 with util.timedcm('debug-discovery') as t:
820 common, hds = doit(localrevs, remoterevs)
820 common, hds = doit(localrevs, remoterevs)
821
821
822 # compute all statistics
822 # compute all statistics
823 common = set(common)
823 common = set(common)
824 rheads = set(hds)
824 rheads = set(hds)
825 lheads = set(repo.heads())
825 lheads = set(repo.heads())
826
826
827 data = {}
827 data = {}
828 data['elapsed'] = t.elapsed
828 data['elapsed'] = t.elapsed
829 data['nb-common'] = len(common)
829 data['nb-common'] = len(common)
830 data['nb-common-local'] = len(common & lheads)
830 data['nb-common-local'] = len(common & lheads)
831 data['nb-common-remote'] = len(common & rheads)
831 data['nb-common-remote'] = len(common & rheads)
832 data['nb-common-both'] = len(common & rheads & lheads)
832 data['nb-local'] = len(lheads)
833 data['nb-local'] = len(lheads)
833 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
834 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
834 data['nb-remote'] = len(rheads)
835 data['nb-remote'] = len(rheads)
835 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
836 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
836 data['nb-revs'] = len(repo.revs('all()'))
837 data['nb-revs'] = len(repo.revs('all()'))
837 data['nb-revs-common'] = len(repo.revs('::%ln', common))
838 data['nb-revs-common'] = len(repo.revs('::%ln', common))
838 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
839 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
839
840
840 # display discovery summary
841 # display discovery summary
841 ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
842 ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
842 ui.write(("heads summary:\n"))
843 ui.write(("heads summary:\n"))
843 ui.write((" total common heads: %(nb-common)9d\n") % data)
844 ui.write((" total common heads: %(nb-common)9d\n") % data)
844 ui.write((" also local heads: %(nb-common-local)9d\n") % data)
845 ui.write((" also local heads: %(nb-common-local)9d\n") % data)
845 ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
846 ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
847 ui.write((" both: %(nb-common-both)9d\n") % data)
846 ui.write((" local heads: %(nb-local)9d\n") % data)
848 ui.write((" local heads: %(nb-local)9d\n") % data)
847 ui.write((" common: %(nb-common-local)9d\n") % data)
849 ui.write((" common: %(nb-common-local)9d\n") % data)
848 ui.write((" missing: %(nb-local-missing)9d\n") % data)
850 ui.write((" missing: %(nb-local-missing)9d\n") % data)
849 ui.write((" remote heads: %(nb-remote)9d\n") % data)
851 ui.write((" remote heads: %(nb-remote)9d\n") % data)
850 ui.write((" common: %(nb-common-remote)9d\n") % data)
852 ui.write((" common: %(nb-common-remote)9d\n") % data)
851 ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
853 ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
852 ui.write(("local changesets: %(nb-revs)9d\n") % data)
854 ui.write(("local changesets: %(nb-revs)9d\n") % data)
853 ui.write((" common: %(nb-revs-common)9d\n") % data)
855 ui.write((" common: %(nb-revs-common)9d\n") % data)
854 ui.write((" missing: %(nb-revs-missing)9d\n") % data)
856 ui.write((" missing: %(nb-revs-missing)9d\n") % data)
855
857
856 if ui.verbose:
858 if ui.verbose:
857 ui.write(("common heads: %s\n") %
859 ui.write(("common heads: %s\n") %
858 " ".join(sorted(short(n) for n in common)))
860 " ".join(sorted(short(n) for n in common)))
859
861
860 _chunksize = 4 << 10
862 _chunksize = 4 << 10
861
863
862 @command('debugdownload',
864 @command('debugdownload',
863 [
865 [
864 ('o', 'output', '', _('path')),
866 ('o', 'output', '', _('path')),
865 ],
867 ],
866 optionalrepo=True)
868 optionalrepo=True)
867 def debugdownload(ui, repo, url, output=None, **opts):
869 def debugdownload(ui, repo, url, output=None, **opts):
868 """download a resource using Mercurial logic and config
870 """download a resource using Mercurial logic and config
869 """
871 """
870 fh = urlmod.open(ui, url, output)
872 fh = urlmod.open(ui, url, output)
871
873
872 dest = ui
874 dest = ui
873 if output:
875 if output:
874 dest = open(output, "wb", _chunksize)
876 dest = open(output, "wb", _chunksize)
875 try:
877 try:
876 data = fh.read(_chunksize)
878 data = fh.read(_chunksize)
877 while data:
879 while data:
878 dest.write(data)
880 dest.write(data)
879 data = fh.read(_chunksize)
881 data = fh.read(_chunksize)
880 finally:
882 finally:
881 if output:
883 if output:
882 dest.close()
884 dest.close()
883
885
884 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
886 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
885 def debugextensions(ui, repo, **opts):
887 def debugextensions(ui, repo, **opts):
886 '''show information about active extensions'''
888 '''show information about active extensions'''
887 opts = pycompat.byteskwargs(opts)
889 opts = pycompat.byteskwargs(opts)
888 exts = extensions.extensions(ui)
890 exts = extensions.extensions(ui)
889 hgver = util.version()
891 hgver = util.version()
890 fm = ui.formatter('debugextensions', opts)
892 fm = ui.formatter('debugextensions', opts)
891 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
893 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
892 isinternal = extensions.ismoduleinternal(extmod)
894 isinternal = extensions.ismoduleinternal(extmod)
893 extsource = pycompat.fsencode(extmod.__file__)
895 extsource = pycompat.fsencode(extmod.__file__)
894 if isinternal:
896 if isinternal:
895 exttestedwith = [] # never expose magic string to users
897 exttestedwith = [] # never expose magic string to users
896 else:
898 else:
897 exttestedwith = getattr(extmod, 'testedwith', '').split()
899 exttestedwith = getattr(extmod, 'testedwith', '').split()
898 extbuglink = getattr(extmod, 'buglink', None)
900 extbuglink = getattr(extmod, 'buglink', None)
899
901
900 fm.startitem()
902 fm.startitem()
901
903
902 if ui.quiet or ui.verbose:
904 if ui.quiet or ui.verbose:
903 fm.write('name', '%s\n', extname)
905 fm.write('name', '%s\n', extname)
904 else:
906 else:
905 fm.write('name', '%s', extname)
907 fm.write('name', '%s', extname)
906 if isinternal or hgver in exttestedwith:
908 if isinternal or hgver in exttestedwith:
907 fm.plain('\n')
909 fm.plain('\n')
908 elif not exttestedwith:
910 elif not exttestedwith:
909 fm.plain(_(' (untested!)\n'))
911 fm.plain(_(' (untested!)\n'))
910 else:
912 else:
911 lasttestedversion = exttestedwith[-1]
913 lasttestedversion = exttestedwith[-1]
912 fm.plain(' (%s!)\n' % lasttestedversion)
914 fm.plain(' (%s!)\n' % lasttestedversion)
913
915
914 fm.condwrite(ui.verbose and extsource, 'source',
916 fm.condwrite(ui.verbose and extsource, 'source',
915 _(' location: %s\n'), extsource or "")
917 _(' location: %s\n'), extsource or "")
916
918
917 if ui.verbose:
919 if ui.verbose:
918 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
920 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
919 fm.data(bundled=isinternal)
921 fm.data(bundled=isinternal)
920
922
921 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
923 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
922 _(' tested with: %s\n'),
924 _(' tested with: %s\n'),
923 fm.formatlist(exttestedwith, name='ver'))
925 fm.formatlist(exttestedwith, name='ver'))
924
926
925 fm.condwrite(ui.verbose and extbuglink, 'buglink',
927 fm.condwrite(ui.verbose and extbuglink, 'buglink',
926 _(' bug reporting: %s\n'), extbuglink or "")
928 _(' bug reporting: %s\n'), extbuglink or "")
927
929
928 fm.end()
930 fm.end()
929
931
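# Illustrative sketch (not part of Mercurial): the per-extension annotation
# printed by debugextensions above boils down to three cases. "hgver" and
# "testedwith" are hypothetical inputs (a version string and a list of
# versions the extension declares it was tested with).
def _extnote(isinternal, hgver, testedwith):
    if isinternal or hgver in testedwith:
        return ''
    if not testedwith:
        return ' (untested!)'
    return ' (%s!)' % testedwith[-1]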
930 @command('debugfileset',
932 @command('debugfileset',
931 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
933 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
932 ('', 'all-files', False,
934 ('', 'all-files', False,
933 _('test files from all revisions and working directory')),
935 _('test files from all revisions and working directory')),
934 ('s', 'show-matcher', None,
936 ('s', 'show-matcher', None,
935 _('print internal representation of matcher')),
937 _('print internal representation of matcher')),
936 ('p', 'show-stage', [],
938 ('p', 'show-stage', [],
937 _('print parsed tree at the given stage'), _('NAME'))],
939 _('print parsed tree at the given stage'), _('NAME'))],
938 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
940 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
939 def debugfileset(ui, repo, expr, **opts):
941 def debugfileset(ui, repo, expr, **opts):
940 '''parse and apply a fileset specification'''
942 '''parse and apply a fileset specification'''
941 from . import fileset
943 from . import fileset
942 fileset.symbols # force import of fileset so we have predicates to optimize
944 fileset.symbols # force import of fileset so we have predicates to optimize
943 opts = pycompat.byteskwargs(opts)
945 opts = pycompat.byteskwargs(opts)
944 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
945
947
946 stages = [
948 stages = [
947 ('parsed', pycompat.identity),
949 ('parsed', pycompat.identity),
948 ('analyzed', filesetlang.analyze),
950 ('analyzed', filesetlang.analyze),
949 ('optimized', filesetlang.optimize),
951 ('optimized', filesetlang.optimize),
950 ]
952 ]
951 stagenames = set(n for n, f in stages)
953 stagenames = set(n for n, f in stages)
952
954
953 showalways = set()
955 showalways = set()
954 if ui.verbose and not opts['show_stage']:
956 if ui.verbose and not opts['show_stage']:
955 # show parsed tree by --verbose (deprecated)
957 # show parsed tree by --verbose (deprecated)
956 showalways.add('parsed')
958 showalways.add('parsed')
957 if opts['show_stage'] == ['all']:
959 if opts['show_stage'] == ['all']:
958 showalways.update(stagenames)
960 showalways.update(stagenames)
959 else:
961 else:
960 for n in opts['show_stage']:
962 for n in opts['show_stage']:
961 if n not in stagenames:
963 if n not in stagenames:
962 raise error.Abort(_('invalid stage name: %s') % n)
964 raise error.Abort(_('invalid stage name: %s') % n)
963 showalways.update(opts['show_stage'])
965 showalways.update(opts['show_stage'])
964
966
965 tree = filesetlang.parse(expr)
967 tree = filesetlang.parse(expr)
966 for n, f in stages:
968 for n, f in stages:
967 tree = f(tree)
969 tree = f(tree)
968 if n in showalways:
970 if n in showalways:
969 if opts['show_stage'] or n != 'parsed':
971 if opts['show_stage'] or n != 'parsed':
970 ui.write(("* %s:\n") % n)
972 ui.write(("* %s:\n") % n)
971 ui.write(filesetlang.prettyformat(tree), "\n")
973 ui.write(filesetlang.prettyformat(tree), "\n")
972
974
973 files = set()
975 files = set()
974 if opts['all_files']:
976 if opts['all_files']:
975 for r in repo:
977 for r in repo:
976 c = repo[r]
978 c = repo[r]
977 files.update(c.files())
979 files.update(c.files())
978 files.update(c.substate)
980 files.update(c.substate)
979 if opts['all_files'] or ctx.rev() is None:
981 if opts['all_files'] or ctx.rev() is None:
980 wctx = repo[None]
982 wctx = repo[None]
981 files.update(repo.dirstate.walk(scmutil.matchall(repo),
983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
982 subrepos=list(wctx.substate),
984 subrepos=list(wctx.substate),
983 unknown=True, ignored=True))
985 unknown=True, ignored=True))
984 files.update(wctx.substate)
986 files.update(wctx.substate)
985 else:
987 else:
986 files.update(ctx.files())
988 files.update(ctx.files())
987 files.update(ctx.substate)
989 files.update(ctx.substate)
988
990
989 m = ctx.matchfileset(expr)
991 m = ctx.matchfileset(expr)
990 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
991 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
992 for f in sorted(files):
994 for f in sorted(files):
993 if not m(f):
995 if not m(f):
994 continue
996 continue
995 ui.write("%s\n" % f)
997 ui.write("%s\n" % f)
996
998
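# Illustrative sketch (not part of Mercurial): the --show-stage handling in
# debugfileset above threads the parsed tree through a list of
# (name, transform) stages and prints it after each requested stage. A
# minimal model of that loop, with hypothetical arguments:
def _runstages(tree, stages, show):
    for name, transform in stages:
        tree = transform(tree)
        if name in show:
            print('* %s:' % name)
            print(tree)
    return tree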
997 @command('debugformat',
999 @command('debugformat',
998 [] + cmdutil.formatteropts)
1000 [] + cmdutil.formatteropts)
999 def debugformat(ui, repo, **opts):
1001 def debugformat(ui, repo, **opts):
1000 """display format information about the current repository
1002 """display format information about the current repository
1001
1003
1002 Use --verbose to get extra information about the current config value and
1004 Use --verbose to get extra information about the current config value and
1003 the Mercurial default."""
1005 the Mercurial default."""
1004 opts = pycompat.byteskwargs(opts)
1006 opts = pycompat.byteskwargs(opts)
1005 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1006 maxvariantlength = max(len('format-variant'), maxvariantlength)
1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1007
1009
1008 def makeformatname(name):
1010 def makeformatname(name):
1009 return '%s:' + (' ' * (maxvariantlength - len(name)))
1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1010
1012
1011 fm = ui.formatter('debugformat', opts)
1013 fm = ui.formatter('debugformat', opts)
1012 if fm.isplain():
1014 if fm.isplain():
1013 def formatvalue(value):
1015 def formatvalue(value):
1014 if util.safehasattr(value, 'startswith'):
1016 if util.safehasattr(value, 'startswith'):
1015 return value
1017 return value
1016 if value:
1018 if value:
1017 return 'yes'
1019 return 'yes'
1018 else:
1020 else:
1019 return 'no'
1021 return 'no'
1020 else:
1022 else:
1021 formatvalue = pycompat.identity
1023 formatvalue = pycompat.identity
1022
1024
1023 fm.plain('format-variant')
1025 fm.plain('format-variant')
1024 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1025 fm.plain(' repo')
1027 fm.plain(' repo')
1026 if ui.verbose:
1028 if ui.verbose:
1027 fm.plain(' config default')
1029 fm.plain(' config default')
1028 fm.plain('\n')
1030 fm.plain('\n')
1029 for fv in upgrade.allformatvariant:
1031 for fv in upgrade.allformatvariant:
1030 fm.startitem()
1032 fm.startitem()
1031 repovalue = fv.fromrepo(repo)
1033 repovalue = fv.fromrepo(repo)
1032 configvalue = fv.fromconfig(repo)
1034 configvalue = fv.fromconfig(repo)
1033
1035
1034 if repovalue != configvalue:
1036 if repovalue != configvalue:
1035 namelabel = 'formatvariant.name.mismatchconfig'
1037 namelabel = 'formatvariant.name.mismatchconfig'
1036 repolabel = 'formatvariant.repo.mismatchconfig'
1038 repolabel = 'formatvariant.repo.mismatchconfig'
1037 elif repovalue != fv.default:
1039 elif repovalue != fv.default:
1038 namelabel = 'formatvariant.name.mismatchdefault'
1040 namelabel = 'formatvariant.name.mismatchdefault'
1039 repolabel = 'formatvariant.repo.mismatchdefault'
1041 repolabel = 'formatvariant.repo.mismatchdefault'
1040 else:
1042 else:
1041 namelabel = 'formatvariant.name.uptodate'
1043 namelabel = 'formatvariant.name.uptodate'
1042 repolabel = 'formatvariant.repo.uptodate'
1044 repolabel = 'formatvariant.repo.uptodate'
1043
1045
1044 fm.write('name', makeformatname(fv.name), fv.name,
1046 fm.write('name', makeformatname(fv.name), fv.name,
1045 label=namelabel)
1047 label=namelabel)
1046 fm.write('repo', ' %3s', formatvalue(repovalue),
1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1047 label=repolabel)
1049 label=repolabel)
1048 if fv.default != configvalue:
1050 if fv.default != configvalue:
1049 configlabel = 'formatvariant.config.special'
1051 configlabel = 'formatvariant.config.special'
1050 else:
1052 else:
1051 configlabel = 'formatvariant.config.default'
1053 configlabel = 'formatvariant.config.default'
1052 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1053 label=configlabel)
1055 label=configlabel)
1054 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1055 label='formatvariant.default')
1057 label='formatvariant.default')
1056 fm.plain('\n')
1058 fm.plain('\n')
1057 fm.end()
1059 fm.end()
1058
1060
1059 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1060 def debugfsinfo(ui, path="."):
1062 def debugfsinfo(ui, path="."):
1061 """show information detected about current filesystem"""
1063 """show information detected about current filesystem"""
1062 ui.write(('path: %s\n') % path)
1064 ui.write(('path: %s\n') % path)
1063 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1064 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1065 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1066 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1067 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1068 casesensitive = '(unknown)'
1070 casesensitive = '(unknown)'
1069 try:
1071 try:
1070 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1071 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1072 except OSError:
1074 except OSError:
1073 pass
1075 pass
1074 ui.write(('case-sensitive: %s\n') % casesensitive)
1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1075
1077
1076 @command('debuggetbundle',
1078 @command('debuggetbundle',
1077 [('H', 'head', [], _('id of head node'), _('ID')),
1079 [('H', 'head', [], _('id of head node'), _('ID')),
1078 ('C', 'common', [], _('id of common node'), _('ID')),
1080 ('C', 'common', [], _('id of common node'), _('ID')),
1079 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1080 _('REPO FILE [-H|-C ID]...'),
1082 _('REPO FILE [-H|-C ID]...'),
1081 norepo=True)
1083 norepo=True)
1082 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1083 """retrieves a bundle from a repo
1085 """retrieves a bundle from a repo
1084
1086
1085 Every ID must be a full-length hex node id string. Saves the bundle to the
1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1086 given file.
1088 given file.
1087 """
1089 """
1088 opts = pycompat.byteskwargs(opts)
1090 opts = pycompat.byteskwargs(opts)
1089 repo = hg.peer(ui, opts, repopath)
1091 repo = hg.peer(ui, opts, repopath)
1090 if not repo.capable('getbundle'):
1092 if not repo.capable('getbundle'):
1091 raise error.Abort("getbundle() not supported by target repository")
1093 raise error.Abort("getbundle() not supported by target repository")
1092 args = {}
1094 args = {}
1093 if common:
1095 if common:
1094 args[r'common'] = [bin(s) for s in common]
1096 args[r'common'] = [bin(s) for s in common]
1095 if head:
1097 if head:
1096 args[r'heads'] = [bin(s) for s in head]
1098 args[r'heads'] = [bin(s) for s in head]
1097 # TODO: get desired bundlecaps from command line.
1099 # TODO: get desired bundlecaps from command line.
1098 args[r'bundlecaps'] = None
1100 args[r'bundlecaps'] = None
1099 bundle = repo.getbundle('debug', **args)
1101 bundle = repo.getbundle('debug', **args)
1100
1102
1101 bundletype = opts.get('type', 'bzip2').lower()
1103 bundletype = opts.get('type', 'bzip2').lower()
1102 btypes = {'none': 'HG10UN',
1104 btypes = {'none': 'HG10UN',
1103 'bzip2': 'HG10BZ',
1105 'bzip2': 'HG10BZ',
1104 'gzip': 'HG10GZ',
1106 'gzip': 'HG10GZ',
1105 'bundle2': 'HG20'}
1107 'bundle2': 'HG20'}
1106 bundletype = btypes.get(bundletype)
1108 bundletype = btypes.get(bundletype)
1107 if bundletype not in bundle2.bundletypes:
1109 if bundletype not in bundle2.bundletypes:
1108 raise error.Abort(_('unknown bundle type specified with --type'))
1110 raise error.Abort(_('unknown bundle type specified with --type'))
1109 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1110
1112
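# Illustrative sketch (not part of Mercurial): the --type handling above is a
# plain dictionary lookup from a user-facing name to an internal bundle spec;
# unknown names map to None and are rejected by the check that follows.
def _bundlespec(name):
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    return btypes.get(name.lower())

# _bundlespec('GZIP') -> 'HG10GZ'; _bundlespec('xz') -> None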
1111 @command('debugignore', [], '[FILE]')
1113 @command('debugignore', [], '[FILE]')
1112 def debugignore(ui, repo, *files, **opts):
1114 def debugignore(ui, repo, *files, **opts):
1113 """display the combined ignore pattern and information about ignored files
1115 """display the combined ignore pattern and information about ignored files
1114
1116
1115 With no argument, display the combined ignore pattern.
1117 With no argument, display the combined ignore pattern.
1116
1118
1117 Given space-separated file names, shows if the given file is ignored and
1119 Given space-separated file names, shows if the given file is ignored and
1118 if so, shows the ignore rule (file and line number) that matched it.
1120 if so, shows the ignore rule (file and line number) that matched it.
1119 """
1121 """
1120 ignore = repo.dirstate._ignore
1122 ignore = repo.dirstate._ignore
1121 if not files:
1123 if not files:
1122 # Show all the patterns
1124 # Show all the patterns
1123 ui.write("%s\n" % pycompat.byterepr(ignore))
1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1124 else:
1126 else:
1125 m = scmutil.match(repo[None], pats=files)
1127 m = scmutil.match(repo[None], pats=files)
1126 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1127 for f in m.files():
1129 for f in m.files():
1128 nf = util.normpath(f)
1130 nf = util.normpath(f)
1129 ignored = None
1131 ignored = None
1130 ignoredata = None
1132 ignoredata = None
1131 if nf != '.':
1133 if nf != '.':
1132 if ignore(nf):
1134 if ignore(nf):
1133 ignored = nf
1135 ignored = nf
1134 ignoredata = repo.dirstate._ignorefileandline(nf)
1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1135 else:
1137 else:
1136 for p in util.finddirs(nf):
1138 for p in util.finddirs(nf):
1137 if ignore(p):
1139 if ignore(p):
1138 ignored = p
1140 ignored = p
1139 ignoredata = repo.dirstate._ignorefileandline(p)
1141 ignoredata = repo.dirstate._ignorefileandline(p)
1140 break
1142 break
1141 if ignored:
1143 if ignored:
1142 if ignored == nf:
1144 if ignored == nf:
1143 ui.write(_("%s is ignored\n") % uipathfn(f))
1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1144 else:
1146 else:
1145 ui.write(_("%s is ignored because of "
1147 ui.write(_("%s is ignored because of "
1146 "containing directory %s\n")
1148 "containing directory %s\n")
1147 % (uipathfn(f), ignored))
1149 % (uipathfn(f), ignored))
1148 ignorefile, lineno, line = ignoredata
1150 ignorefile, lineno, line = ignoredata
1149 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1150 % (ignorefile, lineno, line))
1152 % (ignorefile, lineno, line))
1151 else:
1153 else:
1152 ui.write(_("%s is not ignored\n") % uipathfn(f))
1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1153
1155
1154 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1155 _('-c|-m|FILE'))
1157 _('-c|-m|FILE'))
1156 def debugindex(ui, repo, file_=None, **opts):
1158 def debugindex(ui, repo, file_=None, **opts):
1157 """dump index data for a storage primitive"""
1159 """dump index data for a storage primitive"""
1158 opts = pycompat.byteskwargs(opts)
1160 opts = pycompat.byteskwargs(opts)
1159 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1160
1162
1161 if ui.debugflag:
1163 if ui.debugflag:
1162 shortfn = hex
1164 shortfn = hex
1163 else:
1165 else:
1164 shortfn = short
1166 shortfn = short
1165
1167
1166 idlen = 12
1168 idlen = 12
1167 for i in store:
1169 for i in store:
1168 idlen = len(shortfn(store.node(i)))
1170 idlen = len(shortfn(store.node(i)))
1169 break
1171 break
1170
1172
1171 fm = ui.formatter('debugindex', opts)
1173 fm = ui.formatter('debugindex', opts)
1172 fm.plain(b' rev linkrev %s %s p2\n' % (
1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1173 b'nodeid'.ljust(idlen),
1175 b'nodeid'.ljust(idlen),
1174 b'p1'.ljust(idlen)))
1176 b'p1'.ljust(idlen)))
1175
1177
1176 for rev in store:
1178 for rev in store:
1177 node = store.node(rev)
1179 node = store.node(rev)
1178 parents = store.parents(node)
1180 parents = store.parents(node)
1179
1181
1180 fm.startitem()
1182 fm.startitem()
1181 fm.write(b'rev', b'%6d ', rev)
1183 fm.write(b'rev', b'%6d ', rev)
1182 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1183 fm.write(b'node', '%s ', shortfn(node))
1185 fm.write(b'node', '%s ', shortfn(node))
1184 fm.write(b'p1', '%s ', shortfn(parents[0]))
1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1185 fm.write(b'p2', '%s', shortfn(parents[1]))
1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1186 fm.plain(b'\n')
1188 fm.plain(b'\n')
1187
1189
1188 fm.end()
1190 fm.end()
1189
1191
1190 @command('debugindexdot', cmdutil.debugrevlogopts,
1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1191 _('-c|-m|FILE'), optionalrepo=True)
1193 _('-c|-m|FILE'), optionalrepo=True)
1192 def debugindexdot(ui, repo, file_=None, **opts):
1194 def debugindexdot(ui, repo, file_=None, **opts):
1193 """dump an index DAG as a graphviz dot file"""
1195 """dump an index DAG as a graphviz dot file"""
1194 opts = pycompat.byteskwargs(opts)
1196 opts = pycompat.byteskwargs(opts)
1195 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1196 ui.write(("digraph G {\n"))
1198 ui.write(("digraph G {\n"))
1197 for i in r:
1199 for i in r:
1198 node = r.node(i)
1200 node = r.node(i)
1199 pp = r.parents(node)
1201 pp = r.parents(node)
1200 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1201 if pp[1] != nullid:
1203 if pp[1] != nullid:
1202 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1203 ui.write("}\n")
1205 ui.write("}\n")
1204
1206
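# Illustrative sketch (not part of Mercurial): the dot output emitted above is
# one "parent -> child" edge per parent, skipping the null parent. The triples
# below (rev, p1rev, p2rev) are hypothetical; -1 stands for the null revision.
def _dotedges(triples):
    lines = ["digraph G {"]
    for rev, p1, p2 in triples:
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)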
1205 @command('debugindexstats', [])
1207 @command('debugindexstats', [])
1206 def debugindexstats(ui, repo):
1208 def debugindexstats(ui, repo):
1207 """show stats related to the changelog index"""
1209 """show stats related to the changelog index"""
1208 repo.changelog.shortest(nullid, 1)
1210 repo.changelog.shortest(nullid, 1)
1209 index = repo.changelog.index
1211 index = repo.changelog.index
1210 if not util.safehasattr(index, 'stats'):
1212 if not util.safehasattr(index, 'stats'):
1211 raise error.Abort(_('debugindexstats only works with native code'))
1213 raise error.Abort(_('debugindexstats only works with native code'))
1212 for k, v in sorted(index.stats().items()):
1214 for k, v in sorted(index.stats().items()):
1213 ui.write('%s: %d\n' % (k, v))
1215 ui.write('%s: %d\n' % (k, v))
1214
1216
1215 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1216 def debuginstall(ui, **opts):
1218 def debuginstall(ui, **opts):
1217 '''test Mercurial installation
1219 '''test Mercurial installation
1218
1220
1219 Returns 0 on success.
1221 Returns 0 on success.
1220 '''
1222 '''
1221 opts = pycompat.byteskwargs(opts)
1223 opts = pycompat.byteskwargs(opts)
1222
1224
1223 problems = 0
1225 problems = 0
1224
1226
1225 fm = ui.formatter('debuginstall', opts)
1227 fm = ui.formatter('debuginstall', opts)
1226 fm.startitem()
1228 fm.startitem()
1227
1229
1228 # encoding
1230 # encoding
1229 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1230 err = None
1232 err = None
1231 try:
1233 try:
1232 codecs.lookup(pycompat.sysstr(encoding.encoding))
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1233 except LookupError as inst:
1235 except LookupError as inst:
1234 err = stringutil.forcebytestr(inst)
1236 err = stringutil.forcebytestr(inst)
1235 problems += 1
1237 problems += 1
1236 fm.condwrite(err, 'encodingerror', _(" %s\n"
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1237 " (check that your locale is properly set)\n"), err)
1239 " (check that your locale is properly set)\n"), err)
1238
1240
1239 # Python
1241 # Python
1240 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1241 pycompat.sysexecutable)
1243 pycompat.sysexecutable)
1242 fm.write('pythonver', _("checking Python version (%s)\n"),
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1243 ("%d.%d.%d" % sys.version_info[:3]))
1245 ("%d.%d.%d" % sys.version_info[:3]))
1244 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1245 os.path.dirname(pycompat.fsencode(os.__file__)))
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1246
1248
1247 security = set(sslutil.supportedprotocols)
1249 security = set(sslutil.supportedprotocols)
1248 if sslutil.hassni:
1250 if sslutil.hassni:
1249 security.add('sni')
1251 security.add('sni')
1250
1252
1251 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1252 fm.formatlist(sorted(security), name='protocol',
1254 fm.formatlist(sorted(security), name='protocol',
1253 fmt='%s', sep=','))
1255 fmt='%s', sep=','))
1254
1256
1255 # These are warnings, not errors. So don't increment problem count. This
1257 # These are warnings, not errors. So don't increment problem count. This
1256 # may change in the future.
1258 # may change in the future.
1257 if 'tls1.2' not in security:
1259 if 'tls1.2' not in security:
1258 fm.plain(_(' TLS 1.2 not supported by Python install; '
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1259 'network connections lack modern security\n'))
1261 'network connections lack modern security\n'))
1260 if 'sni' not in security:
1262 if 'sni' not in security:
1261 fm.plain(_(' SNI not supported by Python install; may have '
1263 fm.plain(_(' SNI not supported by Python install; may have '
1262 'connectivity issues with some servers\n'))
1264 'connectivity issues with some servers\n'))
1263
1265
1264 # TODO print CA cert info
1266 # TODO print CA cert info
1265
1267
1266 # hg version
1268 # hg version
1267 hgver = util.version()
1269 hgver = util.version()
1268 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1269 hgver.split('+')[0])
1271 hgver.split('+')[0])
1270 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1271 '+'.join(hgver.split('+')[1:]))
1273 '+'.join(hgver.split('+')[1:]))
1272
1274
1273 # compiled modules
1275 # compiled modules
1274 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1275 policy.policy)
1277 policy.policy)
1276 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1277 os.path.dirname(pycompat.fsencode(__file__)))
1279 os.path.dirname(pycompat.fsencode(__file__)))
1278
1280
1279 if policy.policy in ('c', 'allow'):
1281 if policy.policy in ('c', 'allow'):
1280 err = None
1282 err = None
1281 try:
1283 try:
1282 from .cext import (
1284 from .cext import (
1283 base85,
1285 base85,
1284 bdiff,
1286 bdiff,
1285 mpatch,
1287 mpatch,
1286 osutil,
1288 osutil,
1287 )
1289 )
1288 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1290 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1289 except Exception as inst:
1291 except Exception as inst:
1290 err = stringutil.forcebytestr(inst)
1292 err = stringutil.forcebytestr(inst)
1291 problems += 1
1293 problems += 1
1292 fm.condwrite(err, 'extensionserror', " %s\n", err)
1294 fm.condwrite(err, 'extensionserror', " %s\n", err)
1293
1295
1294 compengines = util.compengines._engines.values()
1296 compengines = util.compengines._engines.values()
1295 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1297 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1296 fm.formatlist(sorted(e.name() for e in compengines),
1298 fm.formatlist(sorted(e.name() for e in compengines),
1297 name='compengine', fmt='%s', sep=', '))
1299 name='compengine', fmt='%s', sep=', '))
1298 fm.write('compenginesavail', _('checking available compression engines '
1300 fm.write('compenginesavail', _('checking available compression engines '
1299 '(%s)\n'),
1301 '(%s)\n'),
1300 fm.formatlist(sorted(e.name() for e in compengines
1302 fm.formatlist(sorted(e.name() for e in compengines
1301 if e.available()),
1303 if e.available()),
1302 name='compengine', fmt='%s', sep=', '))
1304 name='compengine', fmt='%s', sep=', '))
1303 wirecompengines = compression.compengines.supportedwireengines(
1305 wirecompengines = compression.compengines.supportedwireengines(
1304 compression.SERVERROLE)
1306 compression.SERVERROLE)
1305 fm.write('compenginesserver', _('checking available compression engines '
1307 fm.write('compenginesserver', _('checking available compression engines '
1306 'for wire protocol (%s)\n'),
1308 'for wire protocol (%s)\n'),
1307 fm.formatlist([e.name() for e in wirecompengines
1309 fm.formatlist([e.name() for e in wirecompengines
1308 if e.wireprotosupport()],
1310 if e.wireprotosupport()],
1309 name='compengine', fmt='%s', sep=', '))
1311 name='compengine', fmt='%s', sep=', '))
1310 re2 = 'missing'
1312 re2 = 'missing'
1311 if util._re2:
1313 if util._re2:
1312 re2 = 'available'
1314 re2 = 'available'
1313 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1315 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1314 fm.data(re2=bool(util._re2))
1316 fm.data(re2=bool(util._re2))
1315
1317
1316 # templates
1318 # templates
1317 p = templater.templatepaths()
1319 p = templater.templatepaths()
1318 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1320 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1319 fm.condwrite(not p, '', _(" no template directories found\n"))
1321 fm.condwrite(not p, '', _(" no template directories found\n"))
1320 if p:
1322 if p:
1321 m = templater.templatepath("map-cmdline.default")
1323 m = templater.templatepath("map-cmdline.default")
1322 if m:
1324 if m:
1323 # template found, check if it is working
1325 # template found, check if it is working
1324 err = None
1326 err = None
1325 try:
1327 try:
1326 templater.templater.frommapfile(m)
1328 templater.templater.frommapfile(m)
1327 except Exception as inst:
1329 except Exception as inst:
1328 err = stringutil.forcebytestr(inst)
1330 err = stringutil.forcebytestr(inst)
1329 p = None
1331 p = None
1330 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1332 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1331 else:
1333 else:
1332 p = None
1334 p = None
1333 fm.condwrite(p, 'defaulttemplate',
1335 fm.condwrite(p, 'defaulttemplate',
1334 _("checking default template (%s)\n"), m)
1336 _("checking default template (%s)\n"), m)
1335 fm.condwrite(not m, 'defaulttemplatenotfound',
1337 fm.condwrite(not m, 'defaulttemplatenotfound',
1336 _(" template '%s' not found\n"), "default")
1338 _(" template '%s' not found\n"), "default")
1337 if not p:
1339 if not p:
1338 problems += 1
1340 problems += 1
1339 fm.condwrite(not p, '',
1341 fm.condwrite(not p, '',
1340 _(" (templates seem to have been installed incorrectly)\n"))
1342 _(" (templates seem to have been installed incorrectly)\n"))
1341
1343
1342 # editor
1344 # editor
1343 editor = ui.geteditor()
1345 editor = ui.geteditor()
1344 editor = util.expandpath(editor)
1346 editor = util.expandpath(editor)
1345 editorbin = procutil.shellsplit(editor)[0]
1347 editorbin = procutil.shellsplit(editor)[0]
1346 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1348 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1347 cmdpath = procutil.findexe(editorbin)
1349 cmdpath = procutil.findexe(editorbin)
1348 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1350 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1349 _(" No commit editor set and can't find %s in PATH\n"
1351 _(" No commit editor set and can't find %s in PATH\n"
1350 " (specify a commit editor in your configuration"
1352 " (specify a commit editor in your configuration"
1351 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1353 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1352 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1354 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1353 _(" Can't find editor '%s' in PATH\n"
1355 _(" Can't find editor '%s' in PATH\n"
1354 " (specify a commit editor in your configuration"
1356 " (specify a commit editor in your configuration"
1355 " file)\n"), not cmdpath and editorbin)
1357 " file)\n"), not cmdpath and editorbin)
1356 if not cmdpath and editor != 'vi':
1358 if not cmdpath and editor != 'vi':
1357 problems += 1
1359 problems += 1
1358
1360
1359 # check username
1361 # check username
1360 username = None
1362 username = None
1361 err = None
1363 err = None
1362 try:
1364 try:
1363 username = ui.username()
1365 username = ui.username()
1364 except error.Abort as e:
1366 except error.Abort as e:
1365 err = stringutil.forcebytestr(e)
1367 err = stringutil.forcebytestr(e)
1366 problems += 1
1368 problems += 1
1367
1369
1368 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1370 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1369 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1371 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1370 " (specify a username in your configuration file)\n"), err)
1372 " (specify a username in your configuration file)\n"), err)
1371
1373
1372 fm.condwrite(not problems, '',
1374 fm.condwrite(not problems, '',
1373 _("no problems detected\n"))
1375 _("no problems detected\n"))
1374 if not problems:
1376 if not problems:
1375 fm.data(problems=problems)
1377 fm.data(problems=problems)
1376 fm.condwrite(problems, 'problems',
1378 fm.condwrite(problems, 'problems',
1377 _("%d problems detected,"
1379 _("%d problems detected,"
1378 " please check your install!\n"), problems)
1380 " please check your install!\n"), problems)
1379 fm.end()
1381 fm.end()
1380
1382
1381 return problems
1383 return problems
1382
1384
1383 @command('debugknown', [], _('REPO ID...'), norepo=True)
1385 @command('debugknown', [], _('REPO ID...'), norepo=True)
1384 def debugknown(ui, repopath, *ids, **opts):
1386 def debugknown(ui, repopath, *ids, **opts):
1385 """test whether node ids are known to a repo
1387 """test whether node ids are known to a repo
1386
1388
1387 Every ID must be a full-length hex node id string. Returns a list of 0s
1389 Every ID must be a full-length hex node id string. Returns a list of 0s
1388 and 1s indicating unknown/known.
1390 and 1s indicating unknown/known.
1389 """
1391 """
1390 opts = pycompat.byteskwargs(opts)
1392 opts = pycompat.byteskwargs(opts)
1391 repo = hg.peer(ui, opts, repopath)
1393 repo = hg.peer(ui, opts, repopath)
1392 if not repo.capable('known'):
1394 if not repo.capable('known'):
1393 raise error.Abort("known() not supported by target repository")
1395 raise error.Abort("known() not supported by target repository")
1394 flags = repo.known([bin(s) for s in ids])
1396 flags = repo.known([bin(s) for s in ids])
1395 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1397 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1396
1398
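# Illustrative sketch (not part of Mercurial): how the line printed above is
# built from the peer's known() reply, a list of booleans in query order.
def _knownline(flags):
    return "".join(f and "1" or "0" for f in flags)

# _knownline([True, False, True]) == "101"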
1397 @command('debuglabelcomplete', [], _('LABEL...'))
1399 @command('debuglabelcomplete', [], _('LABEL...'))
1398 def debuglabelcomplete(ui, repo, *args):
1400 def debuglabelcomplete(ui, repo, *args):
1399 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1401 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1400 debugnamecomplete(ui, repo, *args)
1402 debugnamecomplete(ui, repo, *args)
1401
1403
1402 @command('debuglocks',
1404 @command('debuglocks',
1403 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1405 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1404 ('W', 'force-wlock', None,
1406 ('W', 'force-wlock', None,
1405 _('free the working state lock (DANGEROUS)')),
1407 _('free the working state lock (DANGEROUS)')),
1406 ('s', 'set-lock', None, _('set the store lock until stopped')),
1408 ('s', 'set-lock', None, _('set the store lock until stopped')),
1407 ('S', 'set-wlock', None,
1409 ('S', 'set-wlock', None,
1408 _('set the working state lock until stopped'))],
1410 _('set the working state lock until stopped'))],
1409 _('[OPTION]...'))
1411 _('[OPTION]...'))
1410 def debuglocks(ui, repo, **opts):
1412 def debuglocks(ui, repo, **opts):
1411 """show or modify state of locks
1413 """show or modify state of locks
1412
1414
1413 By default, this command will show which locks are held. This
1415 By default, this command will show which locks are held. This
1414 includes the user and process holding the lock, the amount of time
1416 includes the user and process holding the lock, the amount of time
1415 the lock has been held, and the machine name where the process is
1417 the lock has been held, and the machine name where the process is
1416 running if it's not local.
1418 running if it's not local.
1417
1419
1418 Locks protect the integrity of Mercurial's data, so they should be
1420 Locks protect the integrity of Mercurial's data, so they should be
1419 treated with care. System crashes or other interruptions may cause
1421 treated with care. System crashes or other interruptions may cause
1420 locks to not be properly released, though Mercurial will usually
1422 locks to not be properly released, though Mercurial will usually
1421 detect and remove such stale locks automatically.
1423 detect and remove such stale locks automatically.
1422
1424
1423 However, detecting stale locks may not always be possible (for
1425 However, detecting stale locks may not always be possible (for
1424 instance, on a shared filesystem). Removing locks may also be
1426 instance, on a shared filesystem). Removing locks may also be
1425 blocked by filesystem permissions.
1427 blocked by filesystem permissions.
1426
1428
1427 Setting a lock will prevent other commands from changing the data.
1429 Setting a lock will prevent other commands from changing the data.
1428 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1430 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1429 The set locks are removed when the command exits.
1431 The set locks are removed when the command exits.
1430
1432
1431 Returns 0 if no locks are held.
1433 Returns 0 if no locks are held.
1432
1434
1433 """
1435 """
1434
1436
1435 if opts.get(r'force_lock'):
1437 if opts.get(r'force_lock'):
1436 repo.svfs.unlink('lock')
1438 repo.svfs.unlink('lock')
1437 if opts.get(r'force_wlock'):
1439 if opts.get(r'force_wlock'):
1438 repo.vfs.unlink('wlock')
1440 repo.vfs.unlink('wlock')
1439 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1441 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1440 return 0
1442 return 0
1441
1443
1442 locks = []
1444 locks = []
1443 try:
1445 try:
1444 if opts.get(r'set_wlock'):
1446 if opts.get(r'set_wlock'):
1445 try:
1447 try:
1446 locks.append(repo.wlock(False))
1448 locks.append(repo.wlock(False))
1447 except error.LockHeld:
1449 except error.LockHeld:
1448 raise error.Abort(_('wlock is already held'))
1450 raise error.Abort(_('wlock is already held'))
1449 if opts.get(r'set_lock'):
1451 if opts.get(r'set_lock'):
1450 try:
1452 try:
1451 locks.append(repo.lock(False))
1453 locks.append(repo.lock(False))
1452 except error.LockHeld:
1454 except error.LockHeld:
1453 raise error.Abort(_('lock is already held'))
1455 raise error.Abort(_('lock is already held'))
1454 if len(locks):
1456 if len(locks):
1455 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1457 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1456 return 0
1458 return 0
1457 finally:
1459 finally:
1458 release(*locks)
1460 release(*locks)
1459
1461
1460 now = time.time()
1462 now = time.time()
1461 held = 0
1463 held = 0
1462
1464
1463 def report(vfs, name, method):
1465 def report(vfs, name, method):
1464 # this causes stale locks to get reaped for more accurate reporting
1466 # this causes stale locks to get reaped for more accurate reporting
1465 try:
1467 try:
1466 l = method(False)
1468 l = method(False)
1467 except error.LockHeld:
1469 except error.LockHeld:
1468 l = None
1470 l = None
1469
1471
1470 if l:
1472 if l:
1471 l.release()
1473 l.release()
1472 else:
1474 else:
1473 try:
1475 try:
1474 st = vfs.lstat(name)
1476 st = vfs.lstat(name)
1475 age = now - st[stat.ST_MTIME]
1477 age = now - st[stat.ST_MTIME]
1476 user = util.username(st.st_uid)
1478 user = util.username(st.st_uid)
1477 locker = vfs.readlock(name)
1479 locker = vfs.readlock(name)
1478 if ":" in locker:
1480 if ":" in locker:
1479 host, pid = locker.split(':')
1481 host, pid = locker.split(':')
1480 if host == socket.gethostname():
1482 if host == socket.gethostname():
1481 locker = 'user %s, process %s' % (user or b'None', pid)
1483 locker = 'user %s, process %s' % (user or b'None', pid)
1482 else:
1484 else:
1483 locker = ('user %s, process %s, host %s'
1485 locker = ('user %s, process %s, host %s'
1484 % (user or b'None', pid, host))
1486 % (user or b'None', pid, host))
1485 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1487 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1486 return 1
1488 return 1
1487 except OSError as e:
1489 except OSError as e:
1488 if e.errno != errno.ENOENT:
1490 if e.errno != errno.ENOENT:
1489 raise
1491 raise
1490
1492
1491 ui.write(("%-6s free\n") % (name + ":"))
1493 ui.write(("%-6s free\n") % (name + ":"))
1492 return 0
1494 return 0
1493
1495
1494 held += report(repo.svfs, "lock", repo.lock)
1496 held += report(repo.svfs, "lock", repo.lock)
1495 held += report(repo.vfs, "wlock", repo.wlock)
1497 held += report(repo.vfs, "wlock", repo.wlock)
1496
1498
1497 return held
1499 return held
1498
1500
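# Illustrative sketch (not part of Mercurial): the lock holder string read by
# report() above has the form "host:pid"; when the host matches the current
# machine it is described as a local process. "gethostname" is passed in here
# to keep the sketch self-contained (socket.gethostname in the code above).
def _describelocker(locker, user, gethostname):
    if ":" not in locker:
        return locker
    host, pid = locker.split(':')
    if host == gethostname():
        return 'user %s, process %s' % (user or 'None', pid)
    return 'user %s, process %s, host %s' % (user or 'None', pid, host)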
1499 @command('debugmanifestfulltextcache', [
1501 @command('debugmanifestfulltextcache', [
1500 ('', 'clear', False, _('clear the cache')),
1502 ('', 'clear', False, _('clear the cache')),
1501 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1503 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1502 _('NODE'))
1504 _('NODE'))
1503 ], '')
1505 ], '')
1504 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1506 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1505 """show, clear or amend the contents of the manifest fulltext cache"""
1507 """show, clear or amend the contents of the manifest fulltext cache"""
1506
1508
1507 def getcache():
1509 def getcache():
1508 r = repo.manifestlog.getstorage(b'')
1510 r = repo.manifestlog.getstorage(b'')
1509 try:
1511 try:
1510 return r._fulltextcache
1512 return r._fulltextcache
1511 except AttributeError:
1513 except AttributeError:
1512 msg = _("Current revlog implementation doesn't appear to have a "
1514 msg = _("Current revlog implementation doesn't appear to have a "
1513 "manifest fulltext cache\n")
1515 "manifest fulltext cache\n")
1514 raise error.Abort(msg)
1516 raise error.Abort(msg)
1515
1517
1516 if opts.get(r'clear'):
1518 if opts.get(r'clear'):
1517 with repo.wlock():
1519 with repo.wlock():
1518 cache = getcache()
1520 cache = getcache()
1519 cache.clear(clear_persisted_data=True)
1521 cache.clear(clear_persisted_data=True)
1520 return
1522 return
1521
1523
1522 if add:
1524 if add:
1523 with repo.wlock():
1525 with repo.wlock():
1524 m = repo.manifestlog
1526 m = repo.manifestlog
1525 store = m.getstorage(b'')
1527 store = m.getstorage(b'')
1526 for n in add:
1528 for n in add:
1527 try:
1529 try:
1528 manifest = m[store.lookup(n)]
1530 manifest = m[store.lookup(n)]
1529 except error.LookupError as e:
1531 except error.LookupError as e:
1530 raise error.Abort(e, hint="Check your manifest node id")
1532 raise error.Abort(e, hint="Check your manifest node id")
1531 manifest.read() # stores revision in cache too
1533 manifest.read() # stores revision in cache too
1532 return
1534 return
1533
1535
1534 cache = getcache()
1536 cache = getcache()
1535 if not len(cache):
1537 if not len(cache):
1536 ui.write(_('cache empty\n'))
1538 ui.write(_('cache empty\n'))
1537 else:
1539 else:
1538 ui.write(
1540 ui.write(
1539 _('cache contains %d manifest entries, in order of most to '
1541 _('cache contains %d manifest entries, in order of most to '
1540 'least recent:\n') % (len(cache),))
1542 'least recent:\n') % (len(cache),))
1541 totalsize = 0
1543 totalsize = 0
1542 for nodeid in cache:
1544 for nodeid in cache:
1543 # Use cache.peek to not update the LRU order
1545 # Use cache.peek to not update the LRU order
1544 data = cache.peek(nodeid)
1546 data = cache.peek(nodeid)
1545 size = len(data)
1547 size = len(data)
1546 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1548 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1547 ui.write(_('id: %s, size %s\n') % (
1549 ui.write(_('id: %s, size %s\n') % (
1548 hex(nodeid), util.bytecount(size)))
1550 hex(nodeid), util.bytecount(size)))
1549 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1551 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1550 ui.write(
1552 ui.write(
1551 _('total cache data size %s, on-disk %s\n') % (
1553 _('total cache data size %s, on-disk %s\n') % (
1552 util.bytecount(totalsize), util.bytecount(ondisk))
1554 util.bytecount(totalsize), util.bytecount(ondisk))
1553 )
1555 )
1554
1556
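# Illustrative sketch (not part of Mercurial): the "total cache data size"
# figure above adds a 24-byte fixed overhead (20-byte nodeid plus 4-byte size
# field) to every entry. "entries" is a hypothetical iterable of data blobs.
def _cachedatasize(entries):
    return sum(len(data) + 24 for data in entries)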
1555 @command('debugmergestate', [], '')
1557 @command('debugmergestate', [], '')
1556 def debugmergestate(ui, repo, *args):
1558 def debugmergestate(ui, repo, *args):
1557 """print merge state
1559 """print merge state
1558
1560
1559 Use --verbose to print out information about whether v1 or v2 merge state
1561 Use --verbose to print out information about whether v1 or v2 merge state
1560 was chosen."""
1562 was chosen."""
1561 def _hashornull(h):
1563 def _hashornull(h):
1562 if h == nullhex:
1564 if h == nullhex:
1563 return 'null'
1565 return 'null'
1564 else:
1566 else:
1565 return h
1567 return h
1566
1568
1567 def printrecords(version):
1569 def printrecords(version):
1568 ui.write(('* version %d records\n') % version)
1570 ui.write(('* version %d records\n') % version)
1569 if version == 1:
1571 if version == 1:
1570 records = v1records
1572 records = v1records
1571 else:
1573 else:
1572 records = v2records
1574 records = v2records
1573
1575
1574 for rtype, record in records:
1576 for rtype, record in records:
1575 # pretty print some record types
1577 # pretty print some record types
1576 if rtype == 'L':
1578 if rtype == 'L':
1577 ui.write(('local: %s\n') % record)
1579 ui.write(('local: %s\n') % record)
1578 elif rtype == 'O':
1580 elif rtype == 'O':
1579 ui.write(('other: %s\n') % record)
1581 ui.write(('other: %s\n') % record)
1580 elif rtype == 'm':
1582 elif rtype == 'm':
1581 driver, mdstate = record.split('\0', 1)
1583 driver, mdstate = record.split('\0', 1)
1582 ui.write(('merge driver: %s (state "%s")\n')
1584 ui.write(('merge driver: %s (state "%s")\n')
1583 % (driver, mdstate))
1585 % (driver, mdstate))
1584 elif rtype in 'FDC':
1586 elif rtype in 'FDC':
1585 r = record.split('\0')
1587 r = record.split('\0')
1586 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1588 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1587 if version == 1:
1589 if version == 1:
1588 onode = 'not stored in v1 format'
1590 onode = 'not stored in v1 format'
1589 flags = r[7]
1591 flags = r[7]
1590 else:
1592 else:
1591 onode, flags = r[7:9]
1593 onode, flags = r[7:9]
1592 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1594 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1593 % (f, rtype, state, _hashornull(hash)))
1595 % (f, rtype, state, _hashornull(hash)))
1594 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1596 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1595 ui.write((' ancestor path: %s (node %s)\n')
1597 ui.write((' ancestor path: %s (node %s)\n')
1596 % (afile, _hashornull(anode)))
1598 % (afile, _hashornull(anode)))
1597 ui.write((' other path: %s (node %s)\n')
1599 ui.write((' other path: %s (node %s)\n')
1598 % (ofile, _hashornull(onode)))
1600 % (ofile, _hashornull(onode)))
1599 elif rtype == 'f':
1601 elif rtype == 'f':
1600 filename, rawextras = record.split('\0', 1)
1602 filename, rawextras = record.split('\0', 1)
1601 extras = rawextras.split('\0')
1603 extras = rawextras.split('\0')
1602 i = 0
1604 i = 0
1603 extrastrings = []
1605 extrastrings = []
1604 while i < len(extras):
1606 while i < len(extras):
1605 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1607 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1606 i += 2
1608 i += 2
1607
1609
1608 ui.write(('file extras: %s (%s)\n')
1610 ui.write(('file extras: %s (%s)\n')
1609 % (filename, ', '.join(extrastrings)))
1611 % (filename, ', '.join(extrastrings)))
1610 elif rtype == 'l':
1612 elif rtype == 'l':
1611 labels = record.split('\0', 2)
1613 labels = record.split('\0', 2)
1612 labels = [l for l in labels if len(l) > 0]
1614 labels = [l for l in labels if len(l) > 0]
1613 ui.write(('labels:\n'))
1615 ui.write(('labels:\n'))
1614 ui.write((' local: %s\n' % labels[0]))
1616 ui.write((' local: %s\n' % labels[0]))
1615 ui.write((' other: %s\n' % labels[1]))
1617 ui.write((' other: %s\n' % labels[1]))
1616 if len(labels) > 2:
1618 if len(labels) > 2:
1617 ui.write((' base: %s\n' % labels[2]))
1619 ui.write((' base: %s\n' % labels[2]))
1618 else:
1620 else:
1619 ui.write(('unrecognized entry: %s\t%s\n')
1621 ui.write(('unrecognized entry: %s\t%s\n')
1620 % (rtype, record.replace('\0', '\t')))
1622 % (rtype, record.replace('\0', '\t')))
1621
1623
1622 # Avoid mergestate.read() since it may raise an exception for unsupported
1624 # Avoid mergestate.read() since it may raise an exception for unsupported
1623 # merge state records. We shouldn't be doing this, but this is OK since this
1625 # merge state records. We shouldn't be doing this, but this is OK since this
1624 # command is pretty low-level.
1626 # command is pretty low-level.
1625 ms = mergemod.mergestate(repo)
1627 ms = mergemod.mergestate(repo)
1626
1628
1627 # sort so that reasonable information is on top
1629 # sort so that reasonable information is on top
1628 v1records = ms._readrecordsv1()
1630 v1records = ms._readrecordsv1()
1629 v2records = ms._readrecordsv2()
1631 v2records = ms._readrecordsv2()
1630 order = 'LOml'
1632 order = 'LOml'
1631 def key(r):
1633 def key(r):
1632 idx = order.find(r[0])
1634 idx = order.find(r[0])
1633 if idx == -1:
1635 if idx == -1:
1634 return (1, r[1])
1636 return (1, r[1])
1635 else:
1637 else:
1636 return (0, idx)
1638 return (0, idx)
1637 v1records.sort(key=key)
1639 v1records.sort(key=key)
1638 v2records.sort(key=key)
1640 v2records.sort(key=key)
1639
1641
1640 if not v1records and not v2records:
1642 if not v1records and not v2records:
1641 ui.write(('no merge state found\n'))
1643 ui.write(('no merge state found\n'))
1642 elif not v2records:
1644 elif not v2records:
1643 ui.note(('no version 2 merge state\n'))
1645 ui.note(('no version 2 merge state\n'))
1644 printrecords(1)
1646 printrecords(1)
1645 elif ms._v1v2match(v1records, v2records):
1647 elif ms._v1v2match(v1records, v2records):
1646 ui.note(('v1 and v2 states match: using v2\n'))
1648 ui.note(('v1 and v2 states match: using v2\n'))
1647 printrecords(2)
1649 printrecords(2)
1648 else:
1650 else:
1649 ui.note(('v1 and v2 states mismatch: using v1\n'))
1651 ui.note(('v1 and v2 states mismatch: using v1\n'))
1650 printrecords(1)
1652 printrecords(1)
1651 if ui.verbose:
1653 if ui.verbose:
1652 printrecords(2)
1654 printrecords(2)
1653
1655
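# Illustrative sketch (not part of Mercurial): the sort key used above places
# the well-known record types in the fixed order L, O, m, l and pushes
# anything unrecognized behind them, ordered by its payload. Records are
# (type, data) pairs; the ones in the usage example are made up.
def _recordkey(record, order='LOml'):
    idx = order.find(record[0])
    if idx == -1:
        return (1, record[1])
    return (0, idx)

# sorted([('f', 'x'), ('O', 'o'), ('L', 'l')], key=_recordkey)
# -> [('L', 'l'), ('O', 'o'), ('f', 'x')]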
1654 @command('debugnamecomplete', [], _('NAME...'))
1656 @command('debugnamecomplete', [], _('NAME...'))
1655 def debugnamecomplete(ui, repo, *args):
1657 def debugnamecomplete(ui, repo, *args):
1656 '''complete "names" - tags, open branch names, bookmark names'''
1658 '''complete "names" - tags, open branch names, bookmark names'''
1657
1659
1658 names = set()
1660 names = set()
1659 # since we previously only listed open branches, we will handle that
1661 # since we previously only listed open branches, we will handle that
1660 # specially (after this for loop)
1662 # specially (after this for loop)
1661 for name, ns in repo.names.iteritems():
1663 for name, ns in repo.names.iteritems():
1662 if name != 'branches':
1664 if name != 'branches':
1663 names.update(ns.listnames(repo))
1665 names.update(ns.listnames(repo))
1664 names.update(tag for (tag, heads, tip, closed)
1666 names.update(tag for (tag, heads, tip, closed)
1665 in repo.branchmap().iterbranches() if not closed)
1667 in repo.branchmap().iterbranches() if not closed)
1666 completions = set()
1668 completions = set()
1667 if not args:
1669 if not args:
1668 args = ['']
1670 args = ['']
1669 for a in args:
1671 for a in args:
1670 completions.update(n for n in names if n.startswith(a))
1672 completions.update(n for n in names if n.startswith(a))
1671 ui.write('\n'.join(sorted(completions)))
1673 ui.write('\n'.join(sorted(completions)))
1672 ui.write('\n')
1674 ui.write('\n')
1673
1675
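# Illustrative sketch (not part of Mercurial): the completion above is a plain
# prefix match over the collected names; an empty argument list matches
# everything. The inputs in the usage example are made up.
def _complete(names, prefixes):
    prefixes = prefixes or ['']
    return sorted(n for n in names if any(n.startswith(p) for p in prefixes))

# _complete({'default', 'dev', 'tip'}, ['d']) -> ['default', 'dev']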
1674 @command('debugobsolete',
1676 @command('debugobsolete',
1675 [('', 'flags', 0, _('markers flag')),
1677 [('', 'flags', 0, _('markers flag')),
1676 ('', 'record-parents', False,
1678 ('', 'record-parents', False,
1677 _('record parent information for the precursor')),
1679 _('record parent information for the precursor')),
1678 ('r', 'rev', [], _('display markers relevant to REV')),
1680 ('r', 'rev', [], _('display markers relevant to REV')),
1679 ('', 'exclusive', False, _('restrict display to markers only '
1681 ('', 'exclusive', False, _('restrict display to markers only '
1680 'relevant to REV')),
1682 'relevant to REV')),
1681 ('', 'index', False, _('display index of the marker')),
1683 ('', 'index', False, _('display index of the marker')),
1682 ('', 'delete', [], _('delete markers specified by indices')),
1684 ('', 'delete', [], _('delete markers specified by indices')),
1683 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1685 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1684 _('[OBSOLETED [REPLACEMENT ...]]'))
1686 _('[OBSOLETED [REPLACEMENT ...]]'))
1685 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1686 """create arbitrary obsolete marker
1688 """create arbitrary obsolete marker
1687
1689
1688 With no arguments, displays the list of obsolescence markers."""
1690 With no arguments, displays the list of obsolescence markers."""
1689
1691
1690 opts = pycompat.byteskwargs(opts)
1692 opts = pycompat.byteskwargs(opts)
1691
1693
1692 def parsenodeid(s):
1694 def parsenodeid(s):
1693 try:
1695 try:
1694 # We do not use revsingle/revrange functions here to accept
1696 # We do not use revsingle/revrange functions here to accept
1695 # arbitrary node identifiers, possibly not present in the
1697 # arbitrary node identifiers, possibly not present in the
1696 # local repository.
1698 # local repository.
1697 n = bin(s)
1699 n = bin(s)
1698 if len(n) != len(nullid):
1700 if len(n) != len(nullid):
1699 raise TypeError()
1701 raise TypeError()
1700 return n
1702 return n
1701 except TypeError:
1703 except TypeError:
1702 raise error.Abort('changeset references must be full hexadecimal '
1704 raise error.Abort('changeset references must be full hexadecimal '
1703 'node identifiers')
1705 'node identifiers')
1704
1706
1705 if opts.get('delete'):
1707 if opts.get('delete'):
1706 indices = []
1708 indices = []
1707 for v in opts.get('delete'):
1709 for v in opts.get('delete'):
1708 try:
1710 try:
1709 indices.append(int(v))
1711 indices.append(int(v))
1710 except ValueError:
1712 except ValueError:
1711 raise error.Abort(_('invalid index value: %r') % v,
1713 raise error.Abort(_('invalid index value: %r') % v,
1712 hint=_('use integers for indices'))
1714 hint=_('use integers for indices'))
1713
1715
1714 if repo.currenttransaction():
1716 if repo.currenttransaction():
1715 raise error.Abort(_('cannot delete obsmarkers in the middle '
1717 raise error.Abort(_('cannot delete obsmarkers in the middle '
1716 'of a transaction.'))
1718 'of a transaction.'))
1717
1719
1718 with repo.lock():
1720 with repo.lock():
1719 n = repair.deleteobsmarkers(repo.obsstore, indices)
1721 n = repair.deleteobsmarkers(repo.obsstore, indices)
1720 ui.write(_('deleted %i obsolescence markers\n') % n)
1722 ui.write(_('deleted %i obsolescence markers\n') % n)
1721
1723
1722 return
1724 return
1723
1725
1724 if precursor is not None:
1726 if precursor is not None:
1725 if opts['rev']:
1727 if opts['rev']:
1726 raise error.Abort('cannot select revision when creating marker')
1728 raise error.Abort('cannot select revision when creating marker')
1727 metadata = {}
1729 metadata = {}
1728 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1730 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1729 succs = tuple(parsenodeid(succ) for succ in successors)
1731 succs = tuple(parsenodeid(succ) for succ in successors)
1730 l = repo.lock()
1732 l = repo.lock()
1731 try:
1733 try:
1732 tr = repo.transaction('debugobsolete')
1734 tr = repo.transaction('debugobsolete')
1733 try:
1735 try:
1734 date = opts.get('date')
1736 date = opts.get('date')
1735 if date:
1737 if date:
1736 date = dateutil.parsedate(date)
1738 date = dateutil.parsedate(date)
1737 else:
1739 else:
1738 date = None
1740 date = None
1739 prec = parsenodeid(precursor)
1741 prec = parsenodeid(precursor)
1740 parents = None
1742 parents = None
1741 if opts['record_parents']:
1743 if opts['record_parents']:
1742 if prec not in repo.unfiltered():
1744 if prec not in repo.unfiltered():
1743 raise error.Abort('cannot used --record-parents on '
1745 raise error.Abort('cannot used --record-parents on '
1744 'unknown changesets')
1746 'unknown changesets')
1745 parents = repo.unfiltered()[prec].parents()
1747 parents = repo.unfiltered()[prec].parents()
1746 parents = tuple(p.node() for p in parents)
1748 parents = tuple(p.node() for p in parents)
1747 repo.obsstore.create(tr, prec, succs, opts['flags'],
1749 repo.obsstore.create(tr, prec, succs, opts['flags'],
1748 parents=parents, date=date,
1750 parents=parents, date=date,
1749 metadata=metadata, ui=ui)
1751 metadata=metadata, ui=ui)
1750 tr.close()
1752 tr.close()
1751 except ValueError as exc:
1753 except ValueError as exc:
1752 raise error.Abort(_('bad obsmarker input: %s') %
1754 raise error.Abort(_('bad obsmarker input: %s') %
1753 pycompat.bytestr(exc))
1755 pycompat.bytestr(exc))
1754 finally:
1756 finally:
1755 tr.release()
1757 tr.release()
1756 finally:
1758 finally:
1757 l.release()
1759 l.release()
1758 else:
1760 else:
1759 if opts['rev']:
1761 if opts['rev']:
1760 revs = scmutil.revrange(repo, opts['rev'])
1762 revs = scmutil.revrange(repo, opts['rev'])
1761 nodes = [repo[r].node() for r in revs]
1763 nodes = [repo[r].node() for r in revs]
1762 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1764 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1763 exclusive=opts['exclusive']))
1765 exclusive=opts['exclusive']))
1764 markers.sort(key=lambda x: x._data)
1766 markers.sort(key=lambda x: x._data)
1765 else:
1767 else:
1766 markers = obsutil.getmarkers(repo)
1768 markers = obsutil.getmarkers(repo)
1767
1769
1768 markerstoiter = markers
1770 markerstoiter = markers
1769 isrelevant = lambda m: True
1771 isrelevant = lambda m: True
1770 if opts.get('rev') and opts.get('index'):
1772 if opts.get('rev') and opts.get('index'):
1771 markerstoiter = obsutil.getmarkers(repo)
1773 markerstoiter = obsutil.getmarkers(repo)
1772 markerset = set(markers)
1774 markerset = set(markers)
1773 isrelevant = lambda m: m in markerset
1775 isrelevant = lambda m: m in markerset
1774
1776
1775 fm = ui.formatter('debugobsolete', opts)
1777 fm = ui.formatter('debugobsolete', opts)
1776 for i, m in enumerate(markerstoiter):
1778 for i, m in enumerate(markerstoiter):
1777 if not isrelevant(m):
1779 if not isrelevant(m):
1778 # marker can be irrelevant when we're iterating over a set
1780 # marker can be irrelevant when we're iterating over a set
1779 # of markers (markerstoiter) which is bigger than the set
1781 # of markers (markerstoiter) which is bigger than the set
1780 # of markers we want to display (markers)
1782 # of markers we want to display (markers)
1781 # this can happen if both --index and --rev options are
1783 # this can happen if both --index and --rev options are
1782 # provided and thus we need to iterate over all of the markers
1784 # provided and thus we need to iterate over all of the markers
1783 # to get the correct indices, but only display the ones that
1785 # to get the correct indices, but only display the ones that
1784 # are relevant to --rev value
1786 # are relevant to --rev value
1785 continue
1787 continue
1786 fm.startitem()
1788 fm.startitem()
1787 ind = i if opts.get('index') else None
1789 ind = i if opts.get('index') else None
1788 cmdutil.showmarker(fm, m, index=ind)
1790 cmdutil.showmarker(fm, m, index=ind)
1789 fm.end()
1791 fm.end()
1790
1792
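# Illustrative usage of debugobsolete (a hedged sketch, not part of the
# command's documented output; the node hashes below are placeholders):
#
#   $ hg debugobsolete
#       lists every obsolescence marker in the repository
#   $ hg debugobsolete --rev tip --index
#       lists only markers relevant to tip, prefixed with their index
#   $ hg debugobsolete --delete 0 3
#       deletes the markers at indices 0 and 3
#   $ hg debugobsolete -d '0 0' -u test <40-hex-precursor> <40-hex-successor>
#       records a marker; per parsenodeid() above, changeset references must
#       be full 40-character hexadecimal node identifiers.
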
@command('debugp1copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write('%s -> %s\n' % (src, dst))

@command('debugp2copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write('%s -> %s\n' % (src, dst))

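# Example output shape for the two commands above (hedged; the filenames are
# hypothetical). Both print one "source -> destination" line per copy
# recorded against the selected parent:
#
#   $ hg debugp1copies -r .
#   old-name.txt -> new-name.txt
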
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

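# Illustrative completion session (hedged; the paths are hypothetical).
# Without --full only the next path segment is returned, mirroring shell
# completion behaviour:
#
#   $ hg debugpathcomplete li
#   lib
#   $ hg debugpathcomplete --full li
#   lib/module.py
#   $ hg debugpathcomplete -r doc      # restrict to files marked removed
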
@command('debugpathcopies',
         cmdutil.walkopts,
         'hg debugpathcopies REV1 REV2 [FILE]',
         inferrepo=True)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write('%s -> %s\n' % (src, dst))

@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

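# Example invocation (hedged; the URL is a placeholder). The three lines
# written above always appear:
#
#   $ hg debugpeer ssh://example.com/repo
#   url: ssh://example.com/repo
#   local: no
#   pushable: yes
#
# Running with --debug additionally shows each peer request, because the
# devel.debug.peer-request override is always enabled here.
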
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

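# Illustrative run (hedged; the file names and the configured pattern are
# hypothetical). Given an hgrc such as:
#
#   [merge-patterns]
#   **.xml = internal:merge
#
# the command reports one "FILE = MERGETOOL" line per examined file:
#
#   $ hg debugpickmergetool data.xml notes.txt
#   data.xml = internal:merge
#   notes.txt = :merge
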
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))

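# Example of the two calling conventions described in the docstring (hedged;
# the repository path, bookmark name and node hashes are placeholders):
#
#   $ hg debugpushkey /path/to/repo namespaces      # two args: list the keys
#   bookmarks
#   phases
#   $ hg debugpushkey /path/to/repo bookmarks mybook <old-node> <new-node>
#       # five args: update the key; the result written above is truthy on
#       # success and the exit code is 0 only if the update was accepted.
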
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

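# Typical uses (hedged sketch): reset the dirstate to the working copy
# parent so the next `hg status` re-checks file content, or repair only the
# entries that are inconsistent with that parent:
#
#   $ hg debugrebuilddirstate -r .
#   $ hg debugrebuilddirstate --minimal
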
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    empty     : ') + fmt % pcfmt(numempty, numrevs))
    ui.write(('                   text  : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write(('                   delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write(('    snapshot  : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    snapshot  : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg)    : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write(('    level-%-3d (min/max/avg)          : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                                numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                                numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                                numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

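# Abridged example of the statistics report (hedged; every figure below is
# invented for illustration -- real numbers depend on the revlog examined):
#
#   $ hg debugrevlog -m
#   format : 1
#   flags  : inline, generaldelta
#
#   revisions     :       120
#       merges    :         4 ( 3.33%)
#       normal    :       116 (96.67%)
#   ...
#   avg chain length  :  3
#   max chain length  : 12
#   compression ratio : 18
#
# With -d/--dump it instead emits one raw index row per revision, using the
# column header written at the top of the dump branch above.
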
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))

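# Example of the default (format 0, non-verbose) output (hedged; the
# revision numbers and short hashes are placeholders):
#
#   $ hg debugrevlogindex -c
#      rev linkrev nodeid       p1           p2
#        0       0 9117c6561b0b 000000000000 000000000000
#        1       1 a0c8bcbbb45c 9117c6561b0b 000000000000
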
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

2566 @command('debugserve', [
2568 @command('debugserve', [
2567 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2569 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2568 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2570 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2569 ('', 'logiofile', '', _('file to log server I/O to')),
2571 ('', 'logiofile', '', _('file to log server I/O to')),
2570 ], '')
2572 ], '')
2571 def debugserve(ui, repo, **opts):
2573 def debugserve(ui, repo, **opts):
2572 """run a server with advanced settings
2574 """run a server with advanced settings
2573
2575
2574 This command is similar to :hg:`serve`. It exists partially as a
2576 This command is similar to :hg:`serve`. It exists partially as a
2575 workaround for the fact that ``hg serve --stdio`` must have specific
2577 workaround for the fact that ``hg serve --stdio`` must have specific
2576 arguments for security reasons.
2578 arguments for security reasons.
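
    An illustrative invocation (the I/O log path is arbitrary)::

      $ hg debugserve --sshstdio --logiofile /tmp/hg-server-io.log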
2577 """
2579 """
2578 opts = pycompat.byteskwargs(opts)
2580 opts = pycompat.byteskwargs(opts)
2579
2581
2580 if not opts['sshstdio']:
2582 if not opts['sshstdio']:
2581 raise error.Abort(_('only --sshstdio is currently supported'))
2583 raise error.Abort(_('only --sshstdio is currently supported'))
2582
2584
2583 logfh = None
2585 logfh = None
2584
2586
2585 if opts['logiofd'] and opts['logiofile']:
2587 if opts['logiofd'] and opts['logiofile']:
2586 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2588 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2587
2589
2588 if opts['logiofd']:
2590 if opts['logiofd']:
2589 # Line buffered because output is line based.
2591 # Line buffered because output is line based.
2590 try:
2592 try:
2591 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2593 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2592 except OSError as e:
2594 except OSError as e:
2593 if e.errno != errno.ESPIPE:
2595 if e.errno != errno.ESPIPE:
2594 raise
2596 raise
2595 # can't seek a pipe, so `ab` mode fails on py3
2597 # can't seek a pipe, so `ab` mode fails on py3
2596 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2598 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2597 elif opts['logiofile']:
2599 elif opts['logiofile']:
2598 logfh = open(opts['logiofile'], 'ab', 1)
2600 logfh = open(opts['logiofile'], 'ab', 1)
2599
2601
2600 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2602 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2601 s.serve_forever()
2603 s.serve_forever()
2602
2604
2603 @command('debugsetparents', [], _('REV1 [REV2]'))
2605 @command('debugsetparents', [], _('REV1 [REV2]'))
2604 def debugsetparents(ui, repo, rev1, rev2=None):
2606 def debugsetparents(ui, repo, rev1, rev2=None):
2605 """manually set the parents of the current working directory
2607 """manually set the parents of the current working directory
2606
2608
2607 This is useful for writing repository conversion tools, but should
2609 This is useful for writing repository conversion tools, but should
2608 be used with care. For example, neither the working directory nor the
2610 be used with care. For example, neither the working directory nor the
2609 dirstate is updated, so file status may be incorrect after running this
2611 dirstate is updated, so file status may be incorrect after running this
2610 command.
2612 command.
2611
2613
2612 Returns 0 on success.
2614 Returns 0 on success.
2613 """
2615 """
2614
2616
2615 node1 = scmutil.revsingle(repo, rev1).node()
2617 node1 = scmutil.revsingle(repo, rev1).node()
2616 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2618 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2617
2619
2618 with repo.wlock():
2620 with repo.wlock():
2619 repo.setparents(node1, node2)
2621 repo.setparents(node1, node2)
2620
2622
2621 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2623 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2622 def debugssl(ui, repo, source=None, **opts):
2624 def debugssl(ui, repo, source=None, **opts):
2623 '''test a secure connection to a server
2625 '''test a secure connection to a server
2624
2626
2625 This builds the certificate chain for the server on Windows, installing the
2627 This builds the certificate chain for the server on Windows, installing the
2626 missing intermediates and trusted root via Windows Update if necessary. It
2628 missing intermediates and trusted root via Windows Update if necessary. It
2627 does nothing on other platforms.
2629 does nothing on other platforms.
2628
2630
2629 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2631 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2630 that server is used. See :hg:`help urls` for more information.
2632 that server is used. See :hg:`help urls` for more information.
2631
2633
2632 If the update succeeds, retry the original operation. Otherwise, the cause
2634 If the update succeeds, retry the original operation. Otherwise, the cause
2633 of the SSL error is likely another issue.
2635 of the SSL error is likely another issue.
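
    An illustrative invocation (the host below is hypothetical)::

      $ hg debugssl https://example.com/repo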
2634 '''
2636 '''
2635 if not pycompat.iswindows:
2637 if not pycompat.iswindows:
2636 raise error.Abort(_('certificate chain building is only possible on '
2638 raise error.Abort(_('certificate chain building is only possible on '
2637 'Windows'))
2639 'Windows'))
2638
2640
2639 if not source:
2641 if not source:
2640 if not repo:
2642 if not repo:
2641 raise error.Abort(_("there is no Mercurial repository here, and no "
2643 raise error.Abort(_("there is no Mercurial repository here, and no "
2642 "server specified"))
2644 "server specified"))
2643 source = "default"
2645 source = "default"
2644
2646
2645 source, branches = hg.parseurl(ui.expandpath(source))
2647 source, branches = hg.parseurl(ui.expandpath(source))
2646 url = util.url(source)
2648 url = util.url(source)
2647
2649
2648 defaultport = {'https': 443, 'ssh': 22}
2650 defaultport = {'https': 443, 'ssh': 22}
2649 if url.scheme in defaultport:
2651 if url.scheme in defaultport:
2650 try:
2652 try:
2651 addr = (url.host, int(url.port or defaultport[url.scheme]))
2653 addr = (url.host, int(url.port or defaultport[url.scheme]))
2652 except ValueError:
2654 except ValueError:
2653 raise error.Abort(_("malformed port number in URL"))
2655 raise error.Abort(_("malformed port number in URL"))
2654 else:
2656 else:
2655 raise error.Abort(_("only https and ssh connections are supported"))
2657 raise error.Abort(_("only https and ssh connections are supported"))
2656
2658
2657 from . import win32
2659 from . import win32
2658
2660
2659 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2661 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2660 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2662 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2661
2663
2662 try:
2664 try:
2663 s.connect(addr)
2665 s.connect(addr)
2664 cert = s.getpeercert(True)
2666 cert = s.getpeercert(True)
2665
2667
2666 ui.status(_('checking the certificate chain for %s\n') % url.host)
2668 ui.status(_('checking the certificate chain for %s\n') % url.host)
2667
2669
2668 complete = win32.checkcertificatechain(cert, build=False)
2670 complete = win32.checkcertificatechain(cert, build=False)
2669
2671
2670 if not complete:
2672 if not complete:
2671 ui.status(_('certificate chain is incomplete, updating... '))
2673 ui.status(_('certificate chain is incomplete, updating... '))
2672
2674
2673 if not win32.checkcertificatechain(cert):
2675 if not win32.checkcertificatechain(cert):
2674 ui.status(_('failed.\n'))
2676 ui.status(_('failed.\n'))
2675 else:
2677 else:
2676 ui.status(_('done.\n'))
2678 ui.status(_('done.\n'))
2677 else:
2679 else:
2678 ui.status(_('full certificate chain is available\n'))
2680 ui.status(_('full certificate chain is available\n'))
2679 finally:
2681 finally:
2680 s.close()
2682 s.close()
2681
2683
2682 @command('debugsub',
2684 @command('debugsub',
2683 [('r', 'rev', '',
2685 [('r', 'rev', '',
2684 _('revision to check'), _('REV'))],
2686 _('revision to check'), _('REV'))],
2685 _('[-r REV] [REV]'))
2687 _('[-r REV] [REV]'))
2686 def debugsub(ui, repo, rev=None):
2688 def debugsub(ui, repo, rev=None):
2687 ctx = scmutil.revsingle(repo, rev, None)
2689 ctx = scmutil.revsingle(repo, rev, None)
2688 for k, v in sorted(ctx.substate.items()):
2690 for k, v in sorted(ctx.substate.items()):
2689 ui.write(('path %s\n') % k)
2691 ui.write(('path %s\n') % k)
2690 ui.write((' source %s\n') % v[0])
2692 ui.write((' source %s\n') % v[0])
2691 ui.write((' revision %s\n') % v[1])
2693 ui.write((' revision %s\n') % v[1])
2692
2694
2693 @command('debugsuccessorssets',
2695 @command('debugsuccessorssets',
2694 [('', 'closest', False, _('return closest successors sets only'))],
2696 [('', 'closest', False, _('return closest successors sets only'))],
2695 _('[REV]'))
2697 _('[REV]'))
2696 def debugsuccessorssets(ui, repo, *revs, **opts):
2698 def debugsuccessorssets(ui, repo, *revs, **opts):
2697 """show set of successors for revision
2699 """show set of successors for revision
2698
2700
2699 A successors set of changeset A is a consistent group of revisions that
2701 A successors set of changeset A is a consistent group of revisions that
2700 succeed A. It contains only non-obsolete changesets, unless the closest
2702 succeed A. It contains only non-obsolete changesets, unless the closest
2701 successors sets are requested (--closest).
2703 successors sets are requested (--closest).
2702
2704
2703 In most cases a changeset A has a single successors set containing a single
2705 In most cases a changeset A has a single successors set containing a single
2704 successor (changeset A replaced by A').
2706 successor (changeset A replaced by A').
2705
2707
2706 A changeset that is made obsolete with no successors is called "pruned".
2708 A changeset that is made obsolete with no successors is called "pruned".
2707 Such changesets have no successors sets at all.
2709 Such changesets have no successors sets at all.
2708
2710
2709 A changeset that has been "split" will have a successors set containing
2711 A changeset that has been "split" will have a successors set containing
2710 more than one successor.
2712 more than one successor.
2711
2713
2712 A changeset that has been rewritten in multiple different ways is called
2714 A changeset that has been rewritten in multiple different ways is called
2713 "divergent". Such changesets have multiple successor sets (each of which
2715 "divergent". Such changesets have multiple successor sets (each of which
2714 may also be split, i.e. have multiple successors).
2716 may also be split, i.e. have multiple successors).
2715
2717
2716 Results are displayed as follows::
2718 Results are displayed as follows::
2717
2719
2718 <rev1>
2720 <rev1>
2719 <successors-1A>
2721 <successors-1A>
2720 <rev2>
2722 <rev2>
2721 <successors-2A>
2723 <successors-2A>
2722 <successors-2B1> <successors-2B2> <successors-2B3>
2724 <successors-2B1> <successors-2B2> <successors-2B3>
2723
2725
2724 Here rev2 has two possible (i.e. divergent) successors sets. The first
2726 Here rev2 has two possible (i.e. divergent) successors sets. The first
2725 holds one element, whereas the second holds three (i.e. the changeset has
2727 holds one element, whereas the second holds three (i.e. the changeset has
2726 been split).
2728 been split).
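
    An illustrative invocation (the revision is hypothetical); ``--closest``
    limits the output to the closest successors sets::

      $ hg debugsuccessorssets --closest 42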
2727 """
2729 """
2728 # passed to successorssets caching computation from one call to another
2730 # passed to successorssets caching computation from one call to another
2729 cache = {}
2731 cache = {}
2730 ctx2str = bytes
2732 ctx2str = bytes
2731 node2str = short
2733 node2str = short
2732 for rev in scmutil.revrange(repo, revs):
2734 for rev in scmutil.revrange(repo, revs):
2733 ctx = repo[rev]
2735 ctx = repo[rev]
2734 ui.write('%s\n' % ctx2str(ctx))
2736 ui.write('%s\n' % ctx2str(ctx))
2735 for succsset in obsutil.successorssets(repo, ctx.node(),
2737 for succsset in obsutil.successorssets(repo, ctx.node(),
2736 closest=opts[r'closest'],
2738 closest=opts[r'closest'],
2737 cache=cache):
2739 cache=cache):
2738 if succsset:
2740 if succsset:
2739 ui.write(' ')
2741 ui.write(' ')
2740 ui.write(node2str(succsset[0]))
2742 ui.write(node2str(succsset[0]))
2741 for node in succsset[1:]:
2743 for node in succsset[1:]:
2742 ui.write(' ')
2744 ui.write(' ')
2743 ui.write(node2str(node))
2745 ui.write(node2str(node))
2744 ui.write('\n')
2746 ui.write('\n')
2745
2747
2746 @command('debugtemplate',
2748 @command('debugtemplate',
2747 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2749 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2748 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2750 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2749 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2751 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2750 optionalrepo=True)
2752 optionalrepo=True)
2751 def debugtemplate(ui, repo, tmpl, **opts):
2753 def debugtemplate(ui, repo, tmpl, **opts):
2752 """parse and apply a template
2754 """parse and apply a template
2753
2755
2754 If -r/--rev is given, the template is processed as a log template and
2756 If -r/--rev is given, the template is processed as a log template and
2755 applied to the given changesets. Otherwise, it is processed as a generic
2757 applied to the given changesets. Otherwise, it is processed as a generic
2756 template.
2758 template.
2757
2759
2758 Use --verbose to print the parsed tree.
2760 Use --verbose to print the parsed tree.
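
    Illustrative invocations (the revision and keyword values are
    hypothetical)::

      $ hg debugtemplate -r . '{rev}:{node|short}\n'
      $ hg debugtemplate -D word=hello '{word}\n'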
2759 """
2761 """
2760 revs = None
2762 revs = None
2761 if opts[r'rev']:
2763 if opts[r'rev']:
2762 if repo is None:
2764 if repo is None:
2763 raise error.RepoError(_('there is no Mercurial repository here '
2765 raise error.RepoError(_('there is no Mercurial repository here '
2764 '(.hg not found)'))
2766 '(.hg not found)'))
2765 revs = scmutil.revrange(repo, opts[r'rev'])
2767 revs = scmutil.revrange(repo, opts[r'rev'])
2766
2768
2767 props = {}
2769 props = {}
2768 for d in opts[r'define']:
2770 for d in opts[r'define']:
2769 try:
2771 try:
2770 k, v = (e.strip() for e in d.split('=', 1))
2772 k, v = (e.strip() for e in d.split('=', 1))
2771 if not k or k == 'ui':
2773 if not k or k == 'ui':
2772 raise ValueError
2774 raise ValueError
2773 props[k] = v
2775 props[k] = v
2774 except ValueError:
2776 except ValueError:
2775 raise error.Abort(_('malformed keyword definition: %s') % d)
2777 raise error.Abort(_('malformed keyword definition: %s') % d)
2776
2778
2777 if ui.verbose:
2779 if ui.verbose:
2778 aliases = ui.configitems('templatealias')
2780 aliases = ui.configitems('templatealias')
2779 tree = templater.parse(tmpl)
2781 tree = templater.parse(tmpl)
2780 ui.note(templater.prettyformat(tree), '\n')
2782 ui.note(templater.prettyformat(tree), '\n')
2781 newtree = templater.expandaliases(tree, aliases)
2783 newtree = templater.expandaliases(tree, aliases)
2782 if newtree != tree:
2784 if newtree != tree:
2783 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2785 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2784
2786
2785 if revs is None:
2787 if revs is None:
2786 tres = formatter.templateresources(ui, repo)
2788 tres = formatter.templateresources(ui, repo)
2787 t = formatter.maketemplater(ui, tmpl, resources=tres)
2789 t = formatter.maketemplater(ui, tmpl, resources=tres)
2788 if ui.verbose:
2790 if ui.verbose:
2789 kwds, funcs = t.symbolsuseddefault()
2791 kwds, funcs = t.symbolsuseddefault()
2790 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2792 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2791 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2793 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2792 ui.write(t.renderdefault(props))
2794 ui.write(t.renderdefault(props))
2793 else:
2795 else:
2794 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2796 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2795 if ui.verbose:
2797 if ui.verbose:
2796 kwds, funcs = displayer.t.symbolsuseddefault()
2798 kwds, funcs = displayer.t.symbolsuseddefault()
2797 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2799 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2798 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2800 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2799 for r in revs:
2801 for r in revs:
2800 displayer.show(repo[r], **pycompat.strkwargs(props))
2802 displayer.show(repo[r], **pycompat.strkwargs(props))
2801 displayer.close()
2803 displayer.close()
2802
2804
2803 @command('debuguigetpass', [
2805 @command('debuguigetpass', [
2804 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2806 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2805 ], _('[-p TEXT]'), norepo=True)
2807 ], _('[-p TEXT]'), norepo=True)
2806 def debuguigetpass(ui, prompt=''):
2808 def debuguigetpass(ui, prompt=''):
2807 """show prompt to type password"""
2809 """show prompt to type password"""
2808 r = ui.getpass(prompt)
2810 r = ui.getpass(prompt)
2809 ui.write(('response: %s\n') % r)
2811 ui.write(('response: %s\n') % r)
2810
2812
2811 @command('debuguiprompt', [
2813 @command('debuguiprompt', [
2812 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2814 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2813 ], _('[-p TEXT]'), norepo=True)
2815 ], _('[-p TEXT]'), norepo=True)
2814 def debuguiprompt(ui, prompt=''):
2816 def debuguiprompt(ui, prompt=''):
2815 """show plain prompt"""
2817 """show plain prompt"""
2816 r = ui.prompt(prompt)
2818 r = ui.prompt(prompt)
2817 ui.write(('response: %s\n') % r)
2819 ui.write(('response: %s\n') % r)
2818
2820
2819 @command('debugupdatecaches', [])
2821 @command('debugupdatecaches', [])
2820 def debugupdatecaches(ui, repo, *pats, **opts):
2822 def debugupdatecaches(ui, repo, *pats, **opts):
2821 """warm all known caches in the repository"""
2823 """warm all known caches in the repository"""
2822 with repo.wlock(), repo.lock():
2824 with repo.wlock(), repo.lock():
2823 repo.updatecaches(full=True)
2825 repo.updatecaches(full=True)
2824
2826
2825 @command('debugupgraderepo', [
2827 @command('debugupgraderepo', [
2826 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2828 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2827 ('', 'run', False, _('performs an upgrade')),
2829 ('', 'run', False, _('performs an upgrade')),
2828 ('', 'backup', True, _('keep the old repository content around')),
2830 ('', 'backup', True, _('keep the old repository content around')),
2829 ])
2831 ])
2830 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2832 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2831 """upgrade a repository to use different features
2833 """upgrade a repository to use different features
2832
2834
2833 If no arguments are specified, the repository is evaluated for upgrade
2835 If no arguments are specified, the repository is evaluated for upgrade
2834 and a list of problems and potential optimizations is printed.
2836 and a list of problems and potential optimizations is printed.
2835
2837
2836 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2838 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2837 can be influenced via additional arguments. More details will be provided
2839 can be influenced via additional arguments. More details will be provided
2838 by the command output when run without ``--run``.
2840 by the command output when run without ``--run``.
2839
2841
2840 During the upgrade, the repository will be locked and no writes will be
2842 During the upgrade, the repository will be locked and no writes will be
2841 allowed.
2843 allowed.
2842
2844
2843 At the end of the upgrade, the repository may not be readable while new
2845 At the end of the upgrade, the repository may not be readable while new
2844 repository data is swapped in. This window will be as long as it takes to
2846 repository data is swapped in. This window will be as long as it takes to
2845 rename some directories inside the ``.hg`` directory. On most machines, this
2847 rename some directories inside the ``.hg`` directory. On most machines, this
2846 should complete almost instantaneously and the chances of a consumer being
2848 should complete almost instantaneously and the chances of a consumer being
2847 unable to access the repository should be low.
2849 unable to access the repository should be low.
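
    A sketch of typical usage is to evaluate the repository first, then
    perform the upgrade::

      $ hg debugupgraderepo
      $ hg debugupgraderepo --run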
2848 """
2850 """
2849 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2851 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2850 backup=backup)
2852 backup=backup)
2851
2853
2852 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2854 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2853 inferrepo=True)
2855 inferrepo=True)
2854 def debugwalk(ui, repo, *pats, **opts):
2856 def debugwalk(ui, repo, *pats, **opts):
2855 """show how files match on given patterns"""
2857 """show how files match on given patterns"""
2856 opts = pycompat.byteskwargs(opts)
2858 opts = pycompat.byteskwargs(opts)
2857 m = scmutil.match(repo[None], pats, opts)
2859 m = scmutil.match(repo[None], pats, opts)
2858 if ui.verbose:
2860 if ui.verbose:
2859 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2861 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2860 items = list(repo[None].walk(m))
2862 items = list(repo[None].walk(m))
2861 if not items:
2863 if not items:
2862 return
2864 return
2863 f = lambda fn: fn
2865 f = lambda fn: fn
2864 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2866 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2865 f = lambda fn: util.normpath(fn)
2867 f = lambda fn: util.normpath(fn)
2866 fmt = 'f %%-%ds %%-%ds %%s' % (
2868 fmt = 'f %%-%ds %%-%ds %%s' % (
2867 max([len(abs) for abs in items]),
2869 max([len(abs) for abs in items]),
2868 max([len(repo.pathto(abs)) for abs in items]))
2870 max([len(repo.pathto(abs)) for abs in items]))
2869 for abs in items:
2871 for abs in items:
2870 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2872 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2871 ui.write("%s\n" % line.rstrip())
2873 ui.write("%s\n" % line.rstrip())
2872
2874
2873 @command('debugwhyunstable', [], _('REV'))
2875 @command('debugwhyunstable', [], _('REV'))
2874 def debugwhyunstable(ui, repo, rev):
2876 def debugwhyunstable(ui, repo, rev):
2875 """explain instabilities of a changeset"""
2877 """explain instabilities of a changeset"""
2876 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2878 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2877 dnodes = ''
2879 dnodes = ''
2878 if entry.get('divergentnodes'):
2880 if entry.get('divergentnodes'):
2879 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2881 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2880 for ctx in entry['divergentnodes']) + ' '
2882 for ctx in entry['divergentnodes']) + ' '
2881 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2883 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2882 entry['reason'], entry['node']))
2884 entry['reason'], entry['node']))
2883
2885
2884 @command('debugwireargs',
2886 @command('debugwireargs',
2885 [('', 'three', '', 'three'),
2887 [('', 'three', '', 'three'),
2886 ('', 'four', '', 'four'),
2888 ('', 'four', '', 'four'),
2887 ('', 'five', '', 'five'),
2889 ('', 'five', '', 'five'),
2888 ] + cmdutil.remoteopts,
2890 ] + cmdutil.remoteopts,
2889 _('REPO [OPTIONS]... [ONE [TWO]]'),
2891 _('REPO [OPTIONS]... [ONE [TWO]]'),
2890 norepo=True)
2892 norepo=True)
2891 def debugwireargs(ui, repopath, *vals, **opts):
2893 def debugwireargs(ui, repopath, *vals, **opts):
2892 opts = pycompat.byteskwargs(opts)
2894 opts = pycompat.byteskwargs(opts)
2893 repo = hg.peer(ui, opts, repopath)
2895 repo = hg.peer(ui, opts, repopath)
2894 for opt in cmdutil.remoteopts:
2896 for opt in cmdutil.remoteopts:
2895 del opts[opt[1]]
2897 del opts[opt[1]]
2896 args = {}
2898 args = {}
2897 for k, v in opts.iteritems():
2899 for k, v in opts.iteritems():
2898 if v:
2900 if v:
2899 args[k] = v
2901 args[k] = v
2900 args = pycompat.strkwargs(args)
2902 args = pycompat.strkwargs(args)
2901 # run twice to check that we don't mess up the stream for the next command
2903 # run twice to check that we don't mess up the stream for the next command
2902 res1 = repo.debugwireargs(*vals, **args)
2904 res1 = repo.debugwireargs(*vals, **args)
2903 res2 = repo.debugwireargs(*vals, **args)
2905 res2 = repo.debugwireargs(*vals, **args)
2904 ui.write("%s\n" % res1)
2906 ui.write("%s\n" % res1)
2905 if res1 != res2:
2907 if res1 != res2:
2906 ui.warn("%s\n" % res2)
2908 ui.warn("%s\n" % res2)
2907
2909
2908 def _parsewirelangblocks(fh):
2910 def _parsewirelangblocks(fh):
2909 activeaction = None
2911 activeaction = None
2910 blocklines = []
2912 blocklines = []
2911 lastindent = 0
2913 lastindent = 0
2912
2914
2913 for line in fh:
2915 for line in fh:
2914 line = line.rstrip()
2916 line = line.rstrip()
2915 if not line:
2917 if not line:
2916 continue
2918 continue
2917
2919
2918 if line.startswith(b'#'):
2920 if line.startswith(b'#'):
2919 continue
2921 continue
2920
2922
2921 if not line.startswith(b' '):
2923 if not line.startswith(b' '):
2922 # New block. Flush previous one.
2924 # New block. Flush previous one.
2923 if activeaction:
2925 if activeaction:
2924 yield activeaction, blocklines
2926 yield activeaction, blocklines
2925
2927
2926 activeaction = line
2928 activeaction = line
2927 blocklines = []
2929 blocklines = []
2928 lastindent = 0
2930 lastindent = 0
2929 continue
2931 continue
2930
2932
2931 # Else we start with an indent.
2933 # Else we start with an indent.
2932
2934
2933 if not activeaction:
2935 if not activeaction:
2934 raise error.Abort(_('indented line outside of block'))
2936 raise error.Abort(_('indented line outside of block'))
2935
2937
2936 indent = len(line) - len(line.lstrip())
2938 indent = len(line) - len(line.lstrip())
2937
2939
2938 # If this line is indented more than the last line, concatenate it.
2940 # If this line is indented more than the last line, concatenate it.
2939 if indent > lastindent and blocklines:
2941 if indent > lastindent and blocklines:
2940 blocklines[-1] += line.lstrip()
2942 blocklines[-1] += line.lstrip()
2941 else:
2943 else:
2942 blocklines.append(line)
2944 blocklines.append(line)
2943 lastindent = indent
2945 lastindent = indent
2944
2946
2945 # Flush last block.
2947 # Flush last block.
2946 if activeaction:
2948 if activeaction:
2947 yield activeaction, blocklines
2949 yield activeaction, blocklines
2948
2950
2949 @command('debugwireproto',
2951 @command('debugwireproto',
2950 [
2952 [
2951 ('', 'localssh', False, _('start an SSH server for this repo')),
2953 ('', 'localssh', False, _('start an SSH server for this repo')),
2952 ('', 'peer', '', _('construct a specific version of the peer')),
2954 ('', 'peer', '', _('construct a specific version of the peer')),
2953 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2955 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2954 ('', 'nologhandshake', False,
2956 ('', 'nologhandshake', False,
2955 _('do not log I/O related to the peer handshake')),
2957 _('do not log I/O related to the peer handshake')),
2956 ] + cmdutil.remoteopts,
2958 ] + cmdutil.remoteopts,
2957 _('[PATH]'),
2959 _('[PATH]'),
2958 optionalrepo=True)
2960 optionalrepo=True)
2959 def debugwireproto(ui, repo, path=None, **opts):
2961 def debugwireproto(ui, repo, path=None, **opts):
2960 """send wire protocol commands to a server
2962 """send wire protocol commands to a server
2961
2963
2962 This command can be used to issue wire protocol commands to remote
2964 This command can be used to issue wire protocol commands to remote
2963 peers and to debug the raw data being exchanged.
2965 peers and to debug the raw data being exchanged.
2964
2966
2965 ``--localssh`` will start an SSH server against the current repository
2967 ``--localssh`` will start an SSH server against the current repository
2966 and connect to that. By default, the connection will perform a handshake
2968 and connect to that. By default, the connection will perform a handshake
2967 and establish an appropriate peer instance.
2969 and establish an appropriate peer instance.
2968
2970
2969 ``--peer`` can be used to bypass the handshake protocol and construct a
2971 ``--peer`` can be used to bypass the handshake protocol and construct a
2970 peer instance using the specified class type. Valid values are ``raw``,
2972 peer instance using the specified class type. Valid values are ``raw``,
2971 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2973 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2972 raw data payloads and don't support higher-level command actions.
2974 raw data payloads and don't support higher-level command actions.
2973
2975
2974 ``--noreadstderr`` can be used to disable automatic reading from stderr
2976 ``--noreadstderr`` can be used to disable automatic reading from stderr
2975 of the peer (for SSH connections only). Disabling automatic reading of
2977 of the peer (for SSH connections only). Disabling automatic reading of
2976 stderr is useful for making output more deterministic.
2978 stderr is useful for making output more deterministic.
2977
2979
2978 Commands are issued via a mini language which is specified via stdin.
2980 Commands are issued via a mini language which is specified via stdin.
2979 The language consists of individual actions to perform. An action is
2981 The language consists of individual actions to perform. An action is
2980 defined by a block. A block is defined as a line with no leading
2982 defined by a block. A block is defined as a line with no leading
2981 space followed by 0 or more lines with leading space. Blocks are
2983 space followed by 0 or more lines with leading space. Blocks are
2982 effectively a high-level command with additional metadata.
2984 effectively a high-level command with additional metadata.
2983
2985
2984 Lines beginning with ``#`` are ignored.
2986 Lines beginning with ``#`` are ignored.
2985
2987
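    As an illustrative sketch, a session can be scripted via stdin (the
    command name here is only an example)::

      $ hg debugwireproto --localssh << EOF
      > command heads
      > EOF
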
2986 The following sections denote available actions.
2988 The following sections denote available actions.
2987
2989
2988 raw
2990 raw
2989 ---
2991 ---
2990
2992
2991 Send raw data to the server.
2993 Send raw data to the server.
2992
2994
2993 The block payload contains the raw data to send as one atomic send
2995 The block payload contains the raw data to send as one atomic send
2994 operation. The data may not actually be delivered in a single system
2996 operation. The data may not actually be delivered in a single system
2995 call: it depends on the abilities of the transport being used.
2997 call: it depends on the abilities of the transport being used.
2996
2998
2997 Each line in the block is de-indented and concatenated. Then, that
2999 Each line in the block is de-indented and concatenated. Then, that
2998 value is evaluated as a Python b'' literal. This allows the use of
3000 value is evaluated as a Python b'' literal. This allows the use of
2999 backslash escaping, etc.
3001 backslash escaping, etc.
3000
3002
3001 raw+
3003 raw+
3002 ----
3004 ----
3003
3005
3004 Behaves like ``raw``, except that output is flushed afterwards.
3006 Behaves like ``raw``, except that output is flushed afterwards.
3005
3007
3006 command <X>
3008 command <X>
3007 -----------
3009 -----------
3008
3010
3009 Send a request to run a named command, whose name follows the ``command``
3011 Send a request to run a named command, whose name follows the ``command``
3010 string.
3012 string.
3011
3013
3012 Arguments to the command are defined as lines in this block. The format of
3014 Arguments to the command are defined as lines in this block. The format of
3013 each line is ``<key> <value>``. e.g.::
3015 each line is ``<key> <value>``. e.g.::
3014
3016
3015 command listkeys
3017 command listkeys
3016 namespace bookmarks
3018 namespace bookmarks
3017
3019
3018 If the value begins with ``eval:``, it will be interpreted as a Python
3020 If the value begins with ``eval:``, it will be interpreted as a Python
3019 literal expression. Otherwise values are interpreted as Python b'' literals.
3021 literal expression. Otherwise values are interpreted as Python b'' literals.
3020 This allows sending complex types and encoding special byte sequences via
3022 This allows sending complex types and encoding special byte sequences via
3021 backslash escaping.
3023 backslash escaping.
3022
3024
3023 The following arguments have special meaning:
3025 The following arguments have special meaning:
3024
3026
3025 ``PUSHFILE``
3027 ``PUSHFILE``
3026 When defined, the *push* mechanism of the peer will be used instead
3028 When defined, the *push* mechanism of the peer will be used instead
3027 of the static request-response mechanism and the content of the
3029 of the static request-response mechanism and the content of the
3028 file specified in the value of this argument will be sent as the
3030 file specified in the value of this argument will be sent as the
3029 command payload.
3031 command payload.
3030
3032
3031 This can be used to submit a local bundle file to the remote.
3033 This can be used to submit a local bundle file to the remote.
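
    For instance, an illustrative block (the bundle path is hypothetical, and
    whether additional arguments are required depends on the command)::

      command unbundle
          PUSHFILE ../bundle.hg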
3032
3034
3033 batchbegin
3035 batchbegin
3034 ----------
3036 ----------
3035
3037
3036 Instruct the peer to begin a batched send.
3038 Instruct the peer to begin a batched send.
3037
3039
3038 All ``command`` blocks are queued for execution until the next
3040 All ``command`` blocks are queued for execution until the next
3039 ``batchsubmit`` block.
3041 ``batchsubmit`` block.
3040
3042
3041 batchsubmit
3043 batchsubmit
3042 -----------
3044 -----------
3043
3045
3044 Submit previously queued ``command`` blocks as a batch request.
3046 Submit previously queued ``command`` blocks as a batch request.
3045
3047
3046 This action MUST be paired with a ``batchbegin`` action.
3048 This action MUST be paired with a ``batchbegin`` action.
3047
3049
3048 httprequest <method> <path>
3050 httprequest <method> <path>
3049 ---------------------------
3051 ---------------------------
3050
3052
3051 (HTTP peer only)
3053 (HTTP peer only)
3052
3054
3053 Send an HTTP request to the peer.
3055 Send an HTTP request to the peer.
3054
3056
3055 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3057 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3056
3058
3057 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3059 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3058 headers to add to the request. e.g. ``Accept: foo``.
3060 headers to add to the request. e.g. ``Accept: foo``.
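
    For example, an illustrative request with a custom header::

      httprequest GET api/
          user-agent: test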
3059
3061
3060 The following arguments are special:
3062 The following arguments are special:
3061
3063
3062 ``BODYFILE``
3064 ``BODYFILE``
3063 The content of the file defined as the value to this argument will be
3065 The content of the file defined as the value to this argument will be
3064 transferred verbatim as the HTTP request body.
3066 transferred verbatim as the HTTP request body.
3065
3067
3066 ``frame <type> <flags> <payload>``
3068 ``frame <type> <flags> <payload>``
3067 Send a unified protocol frame as part of the request body.
3069 Send a unified protocol frame as part of the request body.
3068
3070
3069 All frames will be collected and sent as the body to the HTTP
3071 All frames will be collected and sent as the body to the HTTP
3070 request.
3072 request.
3071
3073
3072 close
3074 close
3073 -----
3075 -----
3074
3076
3075 Close the connection to the server.
3077 Close the connection to the server.
3076
3078
3077 flush
3079 flush
3078 -----
3080 -----
3079
3081
3080 Flush data written to the server.
3082 Flush data written to the server.
3081
3083
3082 readavailable
3084 readavailable
3083 -------------
3085 -------------
3084
3086
3085 Close the write end of the connection and read all available data from
3087 Close the write end of the connection and read all available data from
3086 the server.
3088 the server.
3087
3089
3088 If the connection to the server encompasses multiple pipes, we poll both
3090 If the connection to the server encompasses multiple pipes, we poll both
3089 pipes and read available data.
3091 pipes and read available data.
3090
3092
3091 readline
3093 readline
3092 --------
3094 --------
3093
3095
3094 Read a line of output from the server. If there are multiple output
3096 Read a line of output from the server. If there are multiple output
3095 pipes, reads only the main pipe.
3097 pipes, reads only the main pipe.
3096
3098
3097 ereadline
3099 ereadline
3098 ---------
3100 ---------
3099
3101
3100 Like ``readline``, but read from the stderr pipe, if available.
3102 Like ``readline``, but read from the stderr pipe, if available.
3101
3103
3102 read <X>
3104 read <X>
3103 --------
3105 --------
3104
3106
3105 ``read()`` N bytes from the server's main output pipe.
3107 ``read()`` N bytes from the server's main output pipe.
3106
3108
3107 eread <X>
3109 eread <X>
3108 ---------
3110 ---------
3109
3111
3110 ``read()`` N bytes from the server's stderr pipe, if available.
3112 ``read()`` N bytes from the server's stderr pipe, if available.
3111
3113
3112 Specifying Unified Frame-Based Protocol Frames
3114 Specifying Unified Frame-Based Protocol Frames
3113 ----------------------------------------------
3115 ----------------------------------------------
3114
3116
3115 It is possible to emit *Unified Frame-Based Protocol* frames by using
3117 It is possible to emit *Unified Frame-Based Protocol* frames by using
3116 special syntax.
3118 special syntax.
3117
3119
3118 A frame is composed of a type, flags, and a payload. These can be parsed
3120 A frame is composed of a type, flags, and a payload. These can be parsed
3119 from a string of the form::
3121 from a string of the form::
3120
3122
3121 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3123 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3122
3124
3123 ``request-id`` and ``stream-id`` are integers defining the request and
3125 ``request-id`` and ``stream-id`` are integers defining the request and
3124 stream identifiers.
3126 stream identifiers.
3125
3127
3126 ``type`` can be an integer value for the frame type or the string name
3128 ``type`` can be an integer value for the frame type or the string name
3127 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3129 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3128 ``command-name``.
3130 ``command-name``.
3129
3131
3130 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3132 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3131 components. Each component (and there can be just one) can be an integer
3133 components. Each component (and there can be just one) can be an integer
3132 or a flag name for stream flags or frame flags, respectively. Values are
3134 or a flag name for stream flags or frame flags, respectively. Values are
3133 resolved to integers and then bitwise OR'd together.
3135 resolved to integers and then bitwise OR'd together.
3134
3136
3135 ``payload`` represents the raw frame payload. If it begins with
3137 ``payload`` represents the raw frame payload. If it begins with
3136 ``cbor:``, the following string is evaluated as Python code and the
3138 ``cbor:``, the following string is evaluated as Python code and the
3137 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3139 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3138 as a Python byte string literal.
3140 as a Python byte string literal.
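
    For example, an illustrative frame issuing a ``heads`` command request::

      frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}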
3139 """
3141 """
3140 opts = pycompat.byteskwargs(opts)
3142 opts = pycompat.byteskwargs(opts)
3141
3143
3142 if opts['localssh'] and not repo:
3144 if opts['localssh'] and not repo:
3143 raise error.Abort(_('--localssh requires a repository'))
3145 raise error.Abort(_('--localssh requires a repository'))
3144
3146
3145 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3147 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3146 raise error.Abort(_('invalid value for --peer'),
3148 raise error.Abort(_('invalid value for --peer'),
3147 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3149 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3148
3150
3149 if path and opts['localssh']:
3151 if path and opts['localssh']:
3150 raise error.Abort(_('cannot specify --localssh with an explicit '
3152 raise error.Abort(_('cannot specify --localssh with an explicit '
3151 'path'))
3153 'path'))
3152
3154
3153 if ui.interactive():
3155 if ui.interactive():
3154 ui.write(_('(waiting for commands on stdin)\n'))
3156 ui.write(_('(waiting for commands on stdin)\n'))
3155
3157
3156 blocks = list(_parsewirelangblocks(ui.fin))
3158 blocks = list(_parsewirelangblocks(ui.fin))
3157
3159
3158 proc = None
3160 proc = None
3159 stdin = None
3161 stdin = None
3160 stdout = None
3162 stdout = None
3161 stderr = None
3163 stderr = None
3162 opener = None
3164 opener = None
3163
3165
3164 if opts['localssh']:
3166 if opts['localssh']:
3165 # We start the SSH server in its own process so there is process
3167 # We start the SSH server in its own process so there is process
3166 # separation. This prevents a whole class of potential bugs around
3168 # separation. This prevents a whole class of potential bugs around
3167 # shared state from interfering with server operation.
3169 # shared state from interfering with server operation.
3168 args = procutil.hgcmd() + [
3170 args = procutil.hgcmd() + [
3169 '-R', repo.root,
3171 '-R', repo.root,
3170 'debugserve', '--sshstdio',
3172 'debugserve', '--sshstdio',
3171 ]
3173 ]
3172 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3174 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3173 stdin=subprocess.PIPE,
3175 stdin=subprocess.PIPE,
3174 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3176 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3175 bufsize=0)
3177 bufsize=0)
3176
3178
3177 stdin = proc.stdin
3179 stdin = proc.stdin
3178 stdout = proc.stdout
3180 stdout = proc.stdout
3179 stderr = proc.stderr
3181 stderr = proc.stderr
3180
3182
3181 # We turn the pipes into observers so we can log I/O.
3183 # We turn the pipes into observers so we can log I/O.
3182 if ui.verbose or opts['peer'] == 'raw':
3184 if ui.verbose or opts['peer'] == 'raw':
3183 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3185 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3184 logdata=True)
3186 logdata=True)
3185 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3187 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3186 logdata=True)
3188 logdata=True)
3187 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3189 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3188 logdata=True)
3190 logdata=True)
3189
3191
3190 # --localssh also implies the peer connection settings.
3192 # --localssh also implies the peer connection settings.
3191
3193
3192 url = 'ssh://localserver'
3194 url = 'ssh://localserver'
3193 autoreadstderr = not opts['noreadstderr']
3195 autoreadstderr = not opts['noreadstderr']
3194
3196
3195 if opts['peer'] == 'ssh1':
3197 if opts['peer'] == 'ssh1':
3196 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3198 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3197 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3199 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3198 None, autoreadstderr=autoreadstderr)
3200 None, autoreadstderr=autoreadstderr)
3199 elif opts['peer'] == 'ssh2':
3201 elif opts['peer'] == 'ssh2':
3200 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3202 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3201 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3203 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3202 None, autoreadstderr=autoreadstderr)
3204 None, autoreadstderr=autoreadstderr)
3203 elif opts['peer'] == 'raw':
3205 elif opts['peer'] == 'raw':
3204 ui.write(_('using raw connection to peer\n'))
3206 ui.write(_('using raw connection to peer\n'))
3205 peer = None
3207 peer = None
3206 else:
3208 else:
3207 ui.write(_('creating ssh peer from handshake results\n'))
3209 ui.write(_('creating ssh peer from handshake results\n'))
3208 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3210 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3209 autoreadstderr=autoreadstderr)
3211 autoreadstderr=autoreadstderr)
3210
3212
3211 elif path:
3213 elif path:
3212 # We bypass hg.peer() so we can proxy the sockets.
3214 # We bypass hg.peer() so we can proxy the sockets.
3213 # TODO consider not doing this because we skip
3215 # TODO consider not doing this because we skip
3214 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3216 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3215 u = util.url(path)
3217 u = util.url(path)
3216 if u.scheme != 'http':
3218 if u.scheme != 'http':
3217 raise error.Abort(_('only http:// paths are currently supported'))
3219 raise error.Abort(_('only http:// paths are currently supported'))
3218
3220
3219 url, authinfo = u.authinfo()
3221 url, authinfo = u.authinfo()
3220 openerargs = {
3222 openerargs = {
3221 r'useragent': b'Mercurial debugwireproto',
3223 r'useragent': b'Mercurial debugwireproto',
3222 }
3224 }
3223
3225
3224 # Turn pipes/sockets into observers so we can log I/O.
3226 # Turn pipes/sockets into observers so we can log I/O.
3225 if ui.verbose:
3227 if ui.verbose:
3226 openerargs.update({
3228 openerargs.update({
3227 r'loggingfh': ui,
3229 r'loggingfh': ui,
3228 r'loggingname': b's',
3230 r'loggingname': b's',
3229 r'loggingopts': {
3231 r'loggingopts': {
3230 r'logdata': True,
3232 r'logdata': True,
3231 r'logdataapis': False,
3233 r'logdataapis': False,
3232 },
3234 },
3233 })
3235 })
3234
3236
3235 if ui.debugflag:
3237 if ui.debugflag:
3236 openerargs[r'loggingopts'][r'logdataapis'] = True
3238 openerargs[r'loggingopts'][r'logdataapis'] = True
3237
3239
3238 # Don't send default headers when in raw mode. This allows us to
3240 # Don't send default headers when in raw mode. This allows us to
3239 # bypass most of the behavior of our URL handling code so we can
3241 # bypass most of the behavior of our URL handling code so we can
3240 # have near complete control over what's sent on the wire.
3242 # have near complete control over what's sent on the wire.
3241 if opts['peer'] == 'raw':
3243 if opts['peer'] == 'raw':
3242 openerargs[r'sendaccept'] = False
3244 openerargs[r'sendaccept'] = False
3243
3245
3244 opener = urlmod.opener(ui, authinfo, **openerargs)
3246 opener = urlmod.opener(ui, authinfo, **openerargs)
3245
3247
3246 if opts['peer'] == 'http2':
3248 if opts['peer'] == 'http2':
3247 ui.write(_('creating http peer for wire protocol version 2\n'))
3249 ui.write(_('creating http peer for wire protocol version 2\n'))
3248 # We go through makepeer() because we need an API descriptor for
3250 # We go through makepeer() because we need an API descriptor for
3249 # the peer instance to be useful.
3251 # the peer instance to be useful.
3250 with ui.configoverride({
3252 with ui.configoverride({
3251 ('experimental', 'httppeer.advertise-v2'): True}):
3253 ('experimental', 'httppeer.advertise-v2'): True}):
3252 if opts['nologhandshake']:
3254 if opts['nologhandshake']:
3253 ui.pushbuffer()
3255 ui.pushbuffer()
3254
3256
3255 peer = httppeer.makepeer(ui, path, opener=opener)
3257 peer = httppeer.makepeer(ui, path, opener=opener)
3256
3258
3257 if opts['nologhandshake']:
3259 if opts['nologhandshake']:
3258 ui.popbuffer()
3260 ui.popbuffer()
3259
3261
3260 if not isinstance(peer, httppeer.httpv2peer):
3262 if not isinstance(peer, httppeer.httpv2peer):
3261 raise error.Abort(_('could not instantiate HTTP peer for '
3263 raise error.Abort(_('could not instantiate HTTP peer for '
3262 'wire protocol version 2'),
3264 'wire protocol version 2'),
3263 hint=_('the server may not have the feature '
3265 hint=_('the server may not have the feature '
3264 'enabled or is not allowing this '
3266 'enabled or is not allowing this '
3265 'client version'))
3267 'client version'))
3266
3268
3267 elif opts['peer'] == 'raw':
3269 elif opts['peer'] == 'raw':
3268 ui.write(_('using raw connection to peer\n'))
3270 ui.write(_('using raw connection to peer\n'))
3269 peer = None
3271 peer = None
3270 elif opts['peer']:
3272 elif opts['peer']:
3271 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3273 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3272 opts['peer'])
3274 opts['peer'])
3273 else:
3275 else:
3274 peer = httppeer.makepeer(ui, path, opener=opener)
3276 peer = httppeer.makepeer(ui, path, opener=opener)
3275
3277
3276 # We /could/ populate stdin/stdout with sock.makefile()...
3278 # We /could/ populate stdin/stdout with sock.makefile()...
3277 else:
3279 else:
3278 raise error.Abort(_('unsupported connection configuration'))
3280 raise error.Abort(_('unsupported connection configuration'))
3279
3281
3280 batchedcommands = None
3282 batchedcommands = None
3281
3283
3282 # Now perform actions based on the parsed wire language instructions.
3284 # Now perform actions based on the parsed wire language instructions.
3283 for action, lines in blocks:
3285 for action, lines in blocks:
3284 if action in ('raw', 'raw+'):
3286 if action in ('raw', 'raw+'):
3285 if not stdin:
3287 if not stdin:
3286 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3288 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3287
3289
3288 # Concatenate the data together.
3290 # Concatenate the data together.
3289 data = ''.join(l.lstrip() for l in lines)
3291 data = ''.join(l.lstrip() for l in lines)
3290 data = stringutil.unescapestr(data)
3292 data = stringutil.unescapestr(data)
3291 stdin.write(data)
3293 stdin.write(data)
3292
3294
3293 if action == 'raw+':
3295 if action == 'raw+':
3294 stdin.flush()
3296 stdin.flush()
3295 elif action == 'flush':
3297 elif action == 'flush':
3296 if not stdin:
3298 if not stdin:
3297 raise error.Abort(_('cannot call flush on this peer'))
3299 raise error.Abort(_('cannot call flush on this peer'))
3298 stdin.flush()
3300 stdin.flush()
3299 elif action.startswith('command'):
3301 elif action.startswith('command'):
3300 if not peer:
3302 if not peer:
3301 raise error.Abort(_('cannot send commands unless peer instance '
3303 raise error.Abort(_('cannot send commands unless peer instance '
3302 'is available'))
3304 'is available'))
3303
3305
3304 command = action.split(' ', 1)[1]
3306 command = action.split(' ', 1)[1]
3305
3307
3306 args = {}
3308 args = {}
3307 for line in lines:
3309 for line in lines:
3308 # We need to allow empty values.
3310 # We need to allow empty values.
3309 fields = line.lstrip().split(' ', 1)
3311 fields = line.lstrip().split(' ', 1)
3310 if len(fields) == 1:
3312 if len(fields) == 1:
3311 key = fields[0]
3313 key = fields[0]
3312 value = ''
3314 value = ''
3313 else:
3315 else:
3314 key, value = fields
3316 key, value = fields
3315
3317
3316 if value.startswith('eval:'):
3318 if value.startswith('eval:'):
3317 value = stringutil.evalpythonliteral(value[5:])
3319 value = stringutil.evalpythonliteral(value[5:])
3318 else:
3320 else:
3319 value = stringutil.unescapestr(value)
3321 value = stringutil.unescapestr(value)
3320
3322
3321 args[key] = value
3323 args[key] = value
3322
3324
3323 if batchedcommands is not None:
3325 if batchedcommands is not None:
3324 batchedcommands.append((command, args))
3326 batchedcommands.append((command, args))
3325 continue
3327 continue
3326
3328
3327 ui.status(_('sending %s command\n') % command)
3329 ui.status(_('sending %s command\n') % command)
3328
3330
3329 if 'PUSHFILE' in args:
3331 if 'PUSHFILE' in args:
3330 with open(args['PUSHFILE'], r'rb') as fh:
3332 with open(args['PUSHFILE'], r'rb') as fh:
3331 del args['PUSHFILE']
3333 del args['PUSHFILE']
3332 res, output = peer._callpush(command, fh,
3334 res, output = peer._callpush(command, fh,
3333 **pycompat.strkwargs(args))
3335 **pycompat.strkwargs(args))
3334 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3336 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3335 ui.status(_('remote output: %s\n') %
3337 ui.status(_('remote output: %s\n') %
3336 stringutil.escapestr(output))
3338 stringutil.escapestr(output))
3337 else:
3339 else:
3338 with peer.commandexecutor() as e:
3340 with peer.commandexecutor() as e:
3339 res = e.callcommand(command, args).result()
3341 res = e.callcommand(command, args).result()
3340
3342
3341 if isinstance(res, wireprotov2peer.commandresponse):
3343 if isinstance(res, wireprotov2peer.commandresponse):
3342 val = res.objects()
3344 val = res.objects()
3343 ui.status(_('response: %s\n') %
3345 ui.status(_('response: %s\n') %
3344 stringutil.pprint(val, bprefix=True, indent=2))
3346 stringutil.pprint(val, bprefix=True, indent=2))
3345 else:
3347 else:
3346 ui.status(_('response: %s\n') %
3348 ui.status(_('response: %s\n') %
3347 stringutil.pprint(res, bprefix=True, indent=2))
3349 stringutil.pprint(res, bprefix=True, indent=2))
3348
3350
3349 elif action == 'batchbegin':
3351 elif action == 'batchbegin':
3350 if batchedcommands is not None:
3352 if batchedcommands is not None:
3351 raise error.Abort(_('nested batchbegin not allowed'))
3353 raise error.Abort(_('nested batchbegin not allowed'))
3352
3354
3353 batchedcommands = []
3355 batchedcommands = []
3354 elif action == 'batchsubmit':
3356 elif action == 'batchsubmit':
3355 # There is a batching API we could go through. But it would be
3357 # There is a batching API we could go through. But it would be
3356 # difficult to normalize requests into function calls. It is easier
3358 # difficult to normalize requests into function calls. It is easier
3357 # to bypass this layer and normalize to commands + args.
3359 # to bypass this layer and normalize to commands + args.
3358 ui.status(_('sending batch with %d sub-commands\n') %
3360 ui.status(_('sending batch with %d sub-commands\n') %
3359 len(batchedcommands))
3361 len(batchedcommands))
3360 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3362 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3361 ui.status(_('response #%d: %s\n') %
3363 ui.status(_('response #%d: %s\n') %
3362 (i, stringutil.escapestr(chunk)))
3364 (i, stringutil.escapestr(chunk)))
3363
3365
3364 batchedcommands = None
3366 batchedcommands = None
3365
3367
3366 elif action.startswith('httprequest '):
3368 elif action.startswith('httprequest '):
3367 if not opener:
3369 if not opener:
3368 raise error.Abort(_('cannot use httprequest without an HTTP '
3370 raise error.Abort(_('cannot use httprequest without an HTTP '
3369 'peer'))
3371 'peer'))
3370
3372
3371 request = action.split(' ', 2)
3373 request = action.split(' ', 2)
3372 if len(request) != 3:
3374 if len(request) != 3:
3373 raise error.Abort(_('invalid httprequest: expected format is '
3375 raise error.Abort(_('invalid httprequest: expected format is '
3374 '"httprequest <method> <path>'))
3376 '"httprequest <method> <path>'))
3375
3377
3376 method, httppath = request[1:]
3378 method, httppath = request[1:]
3377 headers = {}
3379 headers = {}
3378 body = None
3380 body = None
3379 frames = []
3381 frames = []
3380 for line in lines:
3382 for line in lines:
3381 line = line.lstrip()
3383 line = line.lstrip()
3382 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3384 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3383 if m:
3385 if m:
3384 # Headers need to use native strings.
3386 # Headers need to use native strings.
3385 key = pycompat.strurl(m.group(1))
3387 key = pycompat.strurl(m.group(1))
3386 value = pycompat.strurl(m.group(2))
3388 value = pycompat.strurl(m.group(2))
3387 headers[key] = value
3389 headers[key] = value
3388 continue
3390 continue
3389
3391
3390 if line.startswith(b'BODYFILE '):
3392 if line.startswith(b'BODYFILE '):
3391 with open(line.split(b' ', 1)[1], 'rb') as fh:
3393 with open(line.split(b' ', 1)[1], 'rb') as fh:
3392 body = fh.read()
3394 body = fh.read()
3393 elif line.startswith(b'frame '):
3395 elif line.startswith(b'frame '):
3394 frame = wireprotoframing.makeframefromhumanstring(
3396 frame = wireprotoframing.makeframefromhumanstring(
3395 line[len(b'frame '):])
3397 line[len(b'frame '):])
3396
3398
3397 frames.append(frame)
3399 frames.append(frame)
3398 else:
3400 else:
3399 raise error.Abort(_('unknown argument to httprequest: %s') %
3401 raise error.Abort(_('unknown argument to httprequest: %s') %
3400 line)
3402 line)
3401
3403
3402 url = path + httppath
3404 url = path + httppath
3403
3405
3404 if frames:
3406 if frames:
3405 body = b''.join(bytes(f) for f in frames)
3407 body = b''.join(bytes(f) for f in frames)
3406
3408
3407 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3409 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3408
3410
3409 # urllib.Request insists on using has_data() as a proxy for
3411 # urllib.Request insists on using has_data() as a proxy for
3410 # determining the request method. Override that to use our
3412 # determining the request method. Override that to use our
3411 # explicitly requested method.
3413 # explicitly requested method.
3412 req.get_method = lambda: pycompat.sysstr(method)
3414 req.get_method = lambda: pycompat.sysstr(method)
3413
3415
3414 try:
3416 try:
3415 res = opener.open(req)
3417 res = opener.open(req)
3416 body = res.read()
3418 body = res.read()
3417 except util.urlerr.urlerror as e:
3419 except util.urlerr.urlerror as e:
3418 # read() method must be called, but only exists in Python 2
3420 # read() method must be called, but only exists in Python 2
3419 getattr(e, 'read', lambda: None)()
3421 getattr(e, 'read', lambda: None)()
3420 continue
3422 continue
3421
3423
3422 ct = res.headers.get(r'Content-Type')
3424 ct = res.headers.get(r'Content-Type')
3423 if ct == r'application/mercurial-cbor':
3425 if ct == r'application/mercurial-cbor':
3424 ui.write(_('cbor> %s\n') %
3426 ui.write(_('cbor> %s\n') %
3425 stringutil.pprint(cborutil.decodeall(body),
3427 stringutil.pprint(cborutil.decodeall(body),
3426 bprefix=True,
3428 bprefix=True,
3427 indent=2))
3429 indent=2))
3428
3430
3429 elif action == 'close':
3431 elif action == 'close':
3430 peer.close()
3432 peer.close()
3431 elif action == 'readavailable':
3433 elif action == 'readavailable':
3432 if not stdout or not stderr:
3434 if not stdout or not stderr:
3433 raise error.Abort(_('readavailable not available on this peer'))
3435 raise error.Abort(_('readavailable not available on this peer'))
3434
3436
3435 stdin.close()
3437 stdin.close()
3436 stdout.read()
3438 stdout.read()
3437 stderr.read()
3439 stderr.read()
3438
3440
3439 elif action == 'readline':
3441 elif action == 'readline':
3440 if not stdout:
3442 if not stdout:
3441 raise error.Abort(_('readline not available on this peer'))
3443 raise error.Abort(_('readline not available on this peer'))
3442 stdout.readline()
3444 stdout.readline()
3443 elif action == 'ereadline':
3445 elif action == 'ereadline':
3444 if not stderr:
3446 if not stderr:
3445 raise error.Abort(_('ereadline not available on this peer'))
3447 raise error.Abort(_('ereadline not available on this peer'))
3446 stderr.readline()
3448 stderr.readline()
3447 elif action.startswith('read '):
3449 elif action.startswith('read '):
3448 count = int(action.split(' ', 1)[1])
3450 count = int(action.split(' ', 1)[1])
3449 if not stdout:
3451 if not stdout:
3450 raise error.Abort(_('read not available on this peer'))
3452 raise error.Abort(_('read not available on this peer'))
3451 stdout.read(count)
3453 stdout.read(count)
3452 elif action.startswith('eread '):
3454 elif action.startswith('eread '):
3453 count = int(action.split(' ', 1)[1])
3455 count = int(action.split(' ', 1)[1])
3454 if not stderr:
3456 if not stderr:
3455 raise error.Abort(_('eread not available on this peer'))
3457 raise error.Abort(_('eread not available on this peer'))
3456 stderr.read(count)
3458 stderr.read(count)
3457 else:
3459 else:
3458 raise error.Abort(_('unknown action: %s') % action)
3460 raise error.Abort(_('unknown action: %s') % action)
3459
3461
3460 if batchedcommands is not None:
3462 if batchedcommands is not None:
3461 raise error.Abort(_('unclosed "batchbegin" request'))
3463 raise error.Abort(_('unclosed "batchbegin" request'))
3462
3464
3463 if peer:
3465 if peer:
3464 peer.close()
3466 peer.close()
3465
3467
3466 if proc:
3468 if proc:
3467 proc.kill()
3469 proc.kill()
@@ -1,1083 +1,1121 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
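
Each test below ends with a "heads summary" block that reports how the local,
remote, and common head sets overlap, including the "both" line for heads that
are common, local and remote at the same time. A minimal sketch of how such
counts can be derived from three sets of head nodes (plain Python with
hypothetical names, not the actual implementation behind debugdiscovery):

  def summarize_heads(localheads, remoteheads, commonheads):
      # Common heads that also appear in each of the other two sets.
      common_local = commonheads & localheads    # "also local heads"
      common_remote = commonheads & remoteheads  # "also remote heads"
      return {
          'total common heads': len(commonheads),
          'also local heads': len(common_local),
          'also remote heads': len(common_remote),
          'both': len(common_local & common_remote),
          'local heads': len(localheads),
          'local common': len(localheads & commonheads),
          'local missing': len(localheads - commonheads),
          'remote heads': len(remoteheads),
          'remote common': len(remoteheads & commonheads),
          'remote unknown': len(remoteheads - commonheads),
      }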
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 elapsed time: * seconds (glob)
47 heads summary:
47 heads summary:
48 total common heads: 2
48 total common heads: 2
49 also local heads: 2
49 also local heads: 2
50 also remote heads: 1
50 also remote heads: 1
51 both: 1
51 local heads: 2
52 local heads: 2
52 common: 2
53 common: 2
53 missing: 0
54 missing: 0
54 remote heads: 3
55 remote heads: 3
55 common: 1
56 common: 1
56 unknown: 2
57 unknown: 2
57 local changesets: 7
58 local changesets: 7
58 common: 7
59 common: 7
59 missing: 0
60 missing: 0
60 common heads: 01241442b3c2 b5714e113bc0
61 common heads: 01241442b3c2 b5714e113bc0
61
62
62 % -- a -> b set
63 % -- a -> b set
63 comparing with b
64 comparing with b
64 query 1; heads
65 query 1; heads
65 searching for changes
66 searching for changes
66 all local heads known remotely
67 all local heads known remotely
67 elapsed time: * seconds (glob)
68 elapsed time: * seconds (glob)
68 heads summary:
69 heads summary:
69 total common heads: 2
70 total common heads: 2
70 also local heads: 2
71 also local heads: 2
71 also remote heads: 1
72 also remote heads: 1
73 both: 1
72 local heads: 2
74 local heads: 2
73 common: 2
75 common: 2
74 missing: 0
76 missing: 0
75 remote heads: 3
77 remote heads: 3
76 common: 1
78 common: 1
77 unknown: 2
79 unknown: 2
78 local changesets: 7
80 local changesets: 7
79 common: 7
81 common: 7
80 missing: 0
82 missing: 0
81 common heads: 01241442b3c2 b5714e113bc0
83 common heads: 01241442b3c2 b5714e113bc0
82
84
83 % -- a -> b set (tip only)
85 % -- a -> b set (tip only)
84 comparing with b
86 comparing with b
85 query 1; heads
87 query 1; heads
86 searching for changes
88 searching for changes
87 all local heads known remotely
89 all local heads known remotely
88 elapsed time: * seconds (glob)
90 elapsed time: * seconds (glob)
89 heads summary:
91 heads summary:
90 total common heads: 1
92 total common heads: 1
91 also local heads: 1
93 also local heads: 1
92 also remote heads: 0
94 also remote heads: 0
95 both: 0
93 local heads: 2
96 local heads: 2
94 common: 1
97 common: 1
95 missing: 1
98 missing: 1
96 remote heads: 3
99 remote heads: 3
97 common: 0
100 common: 0
98 unknown: 3
101 unknown: 3
99 local changesets: 7
102 local changesets: 7
100 common: 6
103 common: 6
101 missing: 1
104 missing: 1
102 common heads: b5714e113bc0
105 common heads: b5714e113bc0
103
106
104 % -- b -> a tree
107 % -- b -> a tree
105 comparing with a
108 comparing with a
106 searching for changes
109 searching for changes
107 unpruned common: 01241442b3c2 b5714e113bc0
110 unpruned common: 01241442b3c2 b5714e113bc0
108 elapsed time: * seconds (glob)
111 elapsed time: * seconds (glob)
109 heads summary:
112 heads summary:
110 total common heads: 2
113 total common heads: 2
111 also local heads: 1
114 also local heads: 1
112 also remote heads: 2
115 also remote heads: 2
116 both: 1
113 local heads: 3
117 local heads: 3
114 common: 1
118 common: 1
115 missing: 2
119 missing: 2
116 remote heads: 2
120 remote heads: 2
117 common: 2
121 common: 2
118 unknown: 0
122 unknown: 0
119 local changesets: 15
123 local changesets: 15
120 common: 7
124 common: 7
121 missing: 8
125 missing: 8
122 common heads: 01241442b3c2 b5714e113bc0
126 common heads: 01241442b3c2 b5714e113bc0
123
127
124 % -- b -> a set
128 % -- b -> a set
125 comparing with a
129 comparing with a
126 query 1; heads
130 query 1; heads
127 searching for changes
131 searching for changes
128 all remote heads known locally
132 all remote heads known locally
129 elapsed time: * seconds (glob)
133 elapsed time: * seconds (glob)
130 heads summary:
134 heads summary:
131 total common heads: 2
135 total common heads: 2
132 also local heads: 1
136 also local heads: 1
133 also remote heads: 2
137 also remote heads: 2
138 both: 1
134 local heads: 3
139 local heads: 3
135 common: 1
140 common: 1
136 missing: 2
141 missing: 2
137 remote heads: 2
142 remote heads: 2
138 common: 2
143 common: 2
139 unknown: 0
144 unknown: 0
140 local changesets: 15
145 local changesets: 15
141 common: 7
146 common: 7
142 missing: 8
147 missing: 8
143 common heads: 01241442b3c2 b5714e113bc0
148 common heads: 01241442b3c2 b5714e113bc0
144
149
145 % -- b -> a set (tip only)
150 % -- b -> a set (tip only)
146 comparing with a
151 comparing with a
147 query 1; heads
152 query 1; heads
148 searching for changes
153 searching for changes
149 all remote heads known locally
154 all remote heads known locally
150 elapsed time: * seconds (glob)
155 elapsed time: * seconds (glob)
151 heads summary:
156 heads summary:
152 total common heads: 2
157 total common heads: 2
153 also local heads: 1
158 also local heads: 1
154 also remote heads: 2
159 also remote heads: 2
160 both: 1
155 local heads: 3
161 local heads: 3
156 common: 1
162 common: 1
157 missing: 2
163 missing: 2
158 remote heads: 2
164 remote heads: 2
159 common: 2
165 common: 2
160 unknown: 0
166 unknown: 0
161 local changesets: 15
167 local changesets: 15
162 common: 7
168 common: 7
163 missing: 8
169 missing: 8
164 common heads: 01241442b3c2 b5714e113bc0
170 common heads: 01241442b3c2 b5714e113bc0
165
171
166
172
167 Many new:
173 Many new:
168
174
169 $ testdesc '-ra1 -ra2' '-rb' '
175 $ testdesc '-ra1 -ra2' '-rb' '
170 > +2:f +3:a1 +3:b
176 > +2:f +3:a1 +3:b
171 > <f +30 :a2'
177 > <f +30 :a2'
172
178
173 % -- a -> b tree
179 % -- a -> b tree
174 comparing with b
180 comparing with b
175 searching for changes
181 searching for changes
176 unpruned common: bebd167eb94d
182 unpruned common: bebd167eb94d
177 elapsed time: * seconds (glob)
183 elapsed time: * seconds (glob)
178 heads summary:
184 heads summary:
179 total common heads: 1
185 total common heads: 1
180 also local heads: 1
186 also local heads: 1
181 also remote heads: 0
187 also remote heads: 0
188 both: 0
182 local heads: 2
189 local heads: 2
183 common: 1
190 common: 1
184 missing: 1
191 missing: 1
185 remote heads: 1
192 remote heads: 1
186 common: 0
193 common: 0
187 unknown: 1
194 unknown: 1
188 local changesets: 35
195 local changesets: 35
189 common: 5
196 common: 5
190 missing: 30
197 missing: 30
191 common heads: bebd167eb94d
198 common heads: bebd167eb94d
192
199
193 % -- a -> b set
200 % -- a -> b set
194 comparing with b
201 comparing with b
195 query 1; heads
202 query 1; heads
196 searching for changes
203 searching for changes
197 taking initial sample
204 taking initial sample
198 searching: 2 queries
205 searching: 2 queries
199 query 2; still undecided: 29, sample size is: 29
206 query 2; still undecided: 29, sample size is: 29
200 2 total queries in *.????s (glob)
207 2 total queries in *.????s (glob)
201 elapsed time: * seconds (glob)
208 elapsed time: * seconds (glob)
202 heads summary:
209 heads summary:
203 total common heads: 1
210 total common heads: 1
204 also local heads: 1
211 also local heads: 1
205 also remote heads: 0
212 also remote heads: 0
213 both: 0
206 local heads: 2
214 local heads: 2
207 common: 1
215 common: 1
208 missing: 1
216 missing: 1
209 remote heads: 1
217 remote heads: 1
210 common: 0
218 common: 0
211 unknown: 1
219 unknown: 1
212 local changesets: 35
220 local changesets: 35
213 common: 5
221 common: 5
214 missing: 30
222 missing: 30
215 common heads: bebd167eb94d
223 common heads: bebd167eb94d
216
224
217 % -- a -> b set (tip only)
225 % -- a -> b set (tip only)
218 comparing with b
226 comparing with b
219 query 1; heads
227 query 1; heads
220 searching for changes
228 searching for changes
221 taking quick initial sample
229 taking quick initial sample
222 searching: 2 queries
230 searching: 2 queries
223 query 2; still undecided: 31, sample size is: 31
231 query 2; still undecided: 31, sample size is: 31
224 2 total queries in *.????s (glob)
232 2 total queries in *.????s (glob)
225 elapsed time: * seconds (glob)
233 elapsed time: * seconds (glob)
226 heads summary:
234 heads summary:
227 total common heads: 1
235 total common heads: 1
228 also local heads: 0
236 also local heads: 0
229 also remote heads: 0
237 also remote heads: 0
238 both: 0
230 local heads: 2
239 local heads: 2
231 common: 0
240 common: 0
232 missing: 2
241 missing: 2
233 remote heads: 1
242 remote heads: 1
234 common: 0
243 common: 0
235 unknown: 1
244 unknown: 1
236 local changesets: 35
245 local changesets: 35
237 common: 2
246 common: 2
238 missing: 33
247 missing: 33
239 common heads: 66f7d451a68b
248 common heads: 66f7d451a68b
240
249
241 % -- b -> a tree
250 % -- b -> a tree
242 comparing with a
251 comparing with a
243 searching for changes
252 searching for changes
244 unpruned common: 66f7d451a68b bebd167eb94d
253 unpruned common: 66f7d451a68b bebd167eb94d
245 elapsed time: * seconds (glob)
254 elapsed time: * seconds (glob)
246 heads summary:
255 heads summary:
247 total common heads: 1
256 total common heads: 1
248 also local heads: 0
257 also local heads: 0
249 also remote heads: 1
258 also remote heads: 1
259 both: 0
250 local heads: 1
260 local heads: 1
251 common: 0
261 common: 0
252 missing: 1
262 missing: 1
253 remote heads: 2
263 remote heads: 2
254 common: 1
264 common: 1
255 unknown: 1
265 unknown: 1
256 local changesets: 8
266 local changesets: 8
257 common: 5
267 common: 5
258 missing: 3
268 missing: 3
259 common heads: bebd167eb94d
269 common heads: bebd167eb94d
260
270
261 % -- b -> a set
271 % -- b -> a set
262 comparing with a
272 comparing with a
263 query 1; heads
273 query 1; heads
264 searching for changes
274 searching for changes
265 taking initial sample
275 taking initial sample
266 searching: 2 queries
276 searching: 2 queries
267 query 2; still undecided: 2, sample size is: 2
277 query 2; still undecided: 2, sample size is: 2
268 2 total queries in *.????s (glob)
278 2 total queries in *.????s (glob)
269 elapsed time: * seconds (glob)
279 elapsed time: * seconds (glob)
270 heads summary:
280 heads summary:
271 total common heads: 1
281 total common heads: 1
272 also local heads: 0
282 also local heads: 0
273 also remote heads: 1
283 also remote heads: 1
284 both: 0
274 local heads: 1
285 local heads: 1
275 common: 0
286 common: 0
276 missing: 1
287 missing: 1
277 remote heads: 2
288 remote heads: 2
278 common: 1
289 common: 1
279 unknown: 1
290 unknown: 1
280 local changesets: 8
291 local changesets: 8
281 common: 5
292 common: 5
282 missing: 3
293 missing: 3
283 common heads: bebd167eb94d
294 common heads: bebd167eb94d
284
295
285 % -- b -> a set (tip only)
296 % -- b -> a set (tip only)
286 comparing with a
297 comparing with a
287 query 1; heads
298 query 1; heads
288 searching for changes
299 searching for changes
289 taking initial sample
300 taking initial sample
290 searching: 2 queries
301 searching: 2 queries
291 query 2; still undecided: 2, sample size is: 2
302 query 2; still undecided: 2, sample size is: 2
292 2 total queries in *.????s (glob)
303 2 total queries in *.????s (glob)
293 elapsed time: * seconds (glob)
304 elapsed time: * seconds (glob)
294 heads summary:
305 heads summary:
295 total common heads: 1
306 total common heads: 1
296 also local heads: 0
307 also local heads: 0
297 also remote heads: 1
308 also remote heads: 1
309 both: 0
298 local heads: 1
310 local heads: 1
299 common: 0
311 common: 0
300 missing: 1
312 missing: 1
301 remote heads: 2
313 remote heads: 2
302 common: 1
314 common: 1
303 unknown: 1
315 unknown: 1
304 local changesets: 8
316 local changesets: 8
305 common: 5
317 common: 5
306 missing: 3
318 missing: 3
307 common heads: bebd167eb94d
319 common heads: bebd167eb94d
308
320
309 Both sides many new with stub:
321 Both sides many new with stub:
310
322
311 $ testdesc '-ra1 -ra2' '-rb' '
323 $ testdesc '-ra1 -ra2' '-rb' '
312 > +2:f +2:a1 +30 :b
324 > +2:f +2:a1 +30 :b
313 > <f +30 :a2'
325 > <f +30 :a2'
314
326
315 % -- a -> b tree
327 % -- a -> b tree
316 comparing with b
328 comparing with b
317 searching for changes
329 searching for changes
318 unpruned common: 2dc09a01254d
330 unpruned common: 2dc09a01254d
319 elapsed time: * seconds (glob)
331 elapsed time: * seconds (glob)
320 heads summary:
332 heads summary:
321 total common heads: 1
333 total common heads: 1
322 also local heads: 1
334 also local heads: 1
323 also remote heads: 0
335 also remote heads: 0
336 both: 0
324 local heads: 2
337 local heads: 2
325 common: 1
338 common: 1
326 missing: 1
339 missing: 1
327 remote heads: 1
340 remote heads: 1
328 common: 0
341 common: 0
329 unknown: 1
342 unknown: 1
330 local changesets: 34
343 local changesets: 34
331 common: 4
344 common: 4
332 missing: 30
345 missing: 30
333 common heads: 2dc09a01254d
346 common heads: 2dc09a01254d
334
347
335 % -- a -> b set
348 % -- a -> b set
336 comparing with b
349 comparing with b
337 query 1; heads
350 query 1; heads
338 searching for changes
351 searching for changes
339 taking initial sample
352 taking initial sample
340 searching: 2 queries
353 searching: 2 queries
341 query 2; still undecided: 29, sample size is: 29
354 query 2; still undecided: 29, sample size is: 29
342 2 total queries in *.????s (glob)
355 2 total queries in *.????s (glob)
343 elapsed time: * seconds (glob)
356 elapsed time: * seconds (glob)
344 heads summary:
357 heads summary:
345 total common heads: 1
358 total common heads: 1
346 also local heads: 1
359 also local heads: 1
347 also remote heads: 0
360 also remote heads: 0
361 both: 0
348 local heads: 2
362 local heads: 2
349 common: 1
363 common: 1
350 missing: 1
364 missing: 1
351 remote heads: 1
365 remote heads: 1
352 common: 0
366 common: 0
353 unknown: 1
367 unknown: 1
354 local changesets: 34
368 local changesets: 34
355 common: 4
369 common: 4
356 missing: 30
370 missing: 30
357 common heads: 2dc09a01254d
371 common heads: 2dc09a01254d
358
372
359 % -- a -> b set (tip only)
373 % -- a -> b set (tip only)
360 comparing with b
374 comparing with b
361 query 1; heads
375 query 1; heads
362 searching for changes
376 searching for changes
363 taking quick initial sample
377 taking quick initial sample
364 searching: 2 queries
378 searching: 2 queries
365 query 2; still undecided: 31, sample size is: 31
379 query 2; still undecided: 31, sample size is: 31
366 2 total queries in *.????s (glob)
380 2 total queries in *.????s (glob)
367 elapsed time: * seconds (glob)
381 elapsed time: * seconds (glob)
368 heads summary:
382 heads summary:
369 total common heads: 1
383 total common heads: 1
370 also local heads: 0
384 also local heads: 0
371 also remote heads: 0
385 also remote heads: 0
386 both: 0
372 local heads: 2
387 local heads: 2
373 common: 0
388 common: 0
374 missing: 2
389 missing: 2
375 remote heads: 1
390 remote heads: 1
376 common: 0
391 common: 0
377 unknown: 1
392 unknown: 1
378 local changesets: 34
393 local changesets: 34
379 common: 2
394 common: 2
380 missing: 32
395 missing: 32
381 common heads: 66f7d451a68b
396 common heads: 66f7d451a68b
382
397
383 % -- b -> a tree
398 % -- b -> a tree
384 comparing with a
399 comparing with a
385 searching for changes
400 searching for changes
386 unpruned common: 2dc09a01254d 66f7d451a68b
401 unpruned common: 2dc09a01254d 66f7d451a68b
387 elapsed time: * seconds (glob)
402 elapsed time: * seconds (glob)
388 heads summary:
403 heads summary:
389 total common heads: 1
404 total common heads: 1
390 also local heads: 0
405 also local heads: 0
391 also remote heads: 1
406 also remote heads: 1
407 both: 0
392 local heads: 1
408 local heads: 1
393 common: 0
409 common: 0
394 missing: 1
410 missing: 1
395 remote heads: 2
411 remote heads: 2
396 common: 1
412 common: 1
397 unknown: 1
413 unknown: 1
398 local changesets: 34
414 local changesets: 34
399 common: 4
415 common: 4
400 missing: 30
416 missing: 30
401 common heads: 2dc09a01254d
417 common heads: 2dc09a01254d
402
418
403 % -- b -> a set
419 % -- b -> a set
404 comparing with a
420 comparing with a
405 query 1; heads
421 query 1; heads
406 searching for changes
422 searching for changes
407 taking initial sample
423 taking initial sample
408 searching: 2 queries
424 searching: 2 queries
409 query 2; still undecided: 29, sample size is: 29
425 query 2; still undecided: 29, sample size is: 29
410 2 total queries in *.????s (glob)
426 2 total queries in *.????s (glob)
411 elapsed time: * seconds (glob)
427 elapsed time: * seconds (glob)
412 heads summary:
428 heads summary:
413 total common heads: 1
429 total common heads: 1
414 also local heads: 0
430 also local heads: 0
415 also remote heads: 1
431 also remote heads: 1
432 both: 0
416 local heads: 1
433 local heads: 1
417 common: 0
434 common: 0
418 missing: 1
435 missing: 1
419 remote heads: 2
436 remote heads: 2
420 common: 1
437 common: 1
421 unknown: 1
438 unknown: 1
422 local changesets: 34
439 local changesets: 34
423 common: 4
440 common: 4
424 missing: 30
441 missing: 30
425 common heads: 2dc09a01254d
442 common heads: 2dc09a01254d
426
443
427 % -- b -> a set (tip only)
444 % -- b -> a set (tip only)
428 comparing with a
445 comparing with a
429 query 1; heads
446 query 1; heads
430 searching for changes
447 searching for changes
431 taking initial sample
448 taking initial sample
432 searching: 2 queries
449 searching: 2 queries
433 query 2; still undecided: 29, sample size is: 29
450 query 2; still undecided: 29, sample size is: 29
434 2 total queries in *.????s (glob)
451 2 total queries in *.????s (glob)
435 elapsed time: * seconds (glob)
452 elapsed time: * seconds (glob)
436 heads summary:
453 heads summary:
437 total common heads: 1
454 total common heads: 1
438 also local heads: 0
455 also local heads: 0
439 also remote heads: 1
456 also remote heads: 1
457 both: 0
440 local heads: 1
458 local heads: 1
441 common: 0
459 common: 0
442 missing: 1
460 missing: 1
443 remote heads: 2
461 remote heads: 2
444 common: 1
462 common: 1
445 unknown: 1
463 unknown: 1
446 local changesets: 34
464 local changesets: 34
447 common: 4
465 common: 4
448 missing: 30
466 missing: 30
449 common heads: 2dc09a01254d
467 common heads: 2dc09a01254d
450
468
451
469
452 Both many new:
470 Both many new:
453
471
454 $ testdesc '-ra' '-rb' '
472 $ testdesc '-ra' '-rb' '
455 > +2:f +30 :b
473 > +2:f +30 :b
456 > <f +30 :a'
474 > <f +30 :a'
457
475
458 % -- a -> b tree
476 % -- a -> b tree
459 comparing with b
477 comparing with b
460 searching for changes
478 searching for changes
461 unpruned common: 66f7d451a68b
479 unpruned common: 66f7d451a68b
462 elapsed time: * seconds (glob)
480 elapsed time: * seconds (glob)
463 heads summary:
481 heads summary:
464 total common heads: 1
482 total common heads: 1
465 also local heads: 0
483 also local heads: 0
466 also remote heads: 0
484 also remote heads: 0
485 both: 0
467 local heads: 1
486 local heads: 1
468 common: 0
487 common: 0
469 missing: 1
488 missing: 1
470 remote heads: 1
489 remote heads: 1
471 common: 0
490 common: 0
472 unknown: 1
491 unknown: 1
473 local changesets: 32
492 local changesets: 32
474 common: 2
493 common: 2
475 missing: 30
494 missing: 30
476 common heads: 66f7d451a68b
495 common heads: 66f7d451a68b
477
496
478 % -- a -> b set
497 % -- a -> b set
479 comparing with b
498 comparing with b
480 query 1; heads
499 query 1; heads
481 searching for changes
500 searching for changes
482 taking quick initial sample
501 taking quick initial sample
483 searching: 2 queries
502 searching: 2 queries
484 query 2; still undecided: 31, sample size is: 31
503 query 2; still undecided: 31, sample size is: 31
485 2 total queries in *.????s (glob)
504 2 total queries in *.????s (glob)
486 elapsed time: * seconds (glob)
505 elapsed time: * seconds (glob)
487 heads summary:
506 heads summary:
488 total common heads: 1
507 total common heads: 1
489 also local heads: 0
508 also local heads: 0
490 also remote heads: 0
509 also remote heads: 0
510 both: 0
491 local heads: 1
511 local heads: 1
492 common: 0
512 common: 0
493 missing: 1
513 missing: 1
494 remote heads: 1
514 remote heads: 1
495 common: 0
515 common: 0
496 unknown: 1
516 unknown: 1
497 local changesets: 32
517 local changesets: 32
498 common: 2
518 common: 2
499 missing: 30
519 missing: 30
500 common heads: 66f7d451a68b
520 common heads: 66f7d451a68b
501
521
502 % -- a -> b set (tip only)
522 % -- a -> b set (tip only)
503 comparing with b
523 comparing with b
504 query 1; heads
524 query 1; heads
505 searching for changes
525 searching for changes
506 taking quick initial sample
526 taking quick initial sample
507 searching: 2 queries
527 searching: 2 queries
508 query 2; still undecided: 31, sample size is: 31
528 query 2; still undecided: 31, sample size is: 31
509 2 total queries in *.????s (glob)
529 2 total queries in *.????s (glob)
510 elapsed time: * seconds (glob)
530 elapsed time: * seconds (glob)
511 heads summary:
531 heads summary:
512 total common heads: 1
532 total common heads: 1
513 also local heads: 0
533 also local heads: 0
514 also remote heads: 0
534 also remote heads: 0
535 both: 0
515 local heads: 1
536 local heads: 1
516 common: 0
537 common: 0
517 missing: 1
538 missing: 1
518 remote heads: 1
539 remote heads: 1
519 common: 0
540 common: 0
520 unknown: 1
541 unknown: 1
521 local changesets: 32
542 local changesets: 32
522 common: 2
543 common: 2
523 missing: 30
544 missing: 30
524 common heads: 66f7d451a68b
545 common heads: 66f7d451a68b
525
546
526 % -- b -> a tree
547 % -- b -> a tree
527 comparing with a
548 comparing with a
528 searching for changes
549 searching for changes
529 unpruned common: 66f7d451a68b
550 unpruned common: 66f7d451a68b
530 elapsed time: * seconds (glob)
551 elapsed time: * seconds (glob)
531 heads summary:
552 heads summary:
532 total common heads: 1
553 total common heads: 1
533 also local heads: 0
554 also local heads: 0
534 also remote heads: 0
555 also remote heads: 0
556 both: 0
535 local heads: 1
557 local heads: 1
536 common: 0
558 common: 0
537 missing: 1
559 missing: 1
538 remote heads: 1
560 remote heads: 1
539 common: 0
561 common: 0
540 unknown: 1
562 unknown: 1
541 local changesets: 32
563 local changesets: 32
542 common: 2
564 common: 2
543 missing: 30
565 missing: 30
544 common heads: 66f7d451a68b
566 common heads: 66f7d451a68b
545
567
546 % -- b -> a set
568 % -- b -> a set
547 comparing with a
569 comparing with a
548 query 1; heads
570 query 1; heads
549 searching for changes
571 searching for changes
550 taking quick initial sample
572 taking quick initial sample
551 searching: 2 queries
573 searching: 2 queries
552 query 2; still undecided: 31, sample size is: 31
574 query 2; still undecided: 31, sample size is: 31
553 2 total queries in *.????s (glob)
575 2 total queries in *.????s (glob)
554 elapsed time: * seconds (glob)
576 elapsed time: * seconds (glob)
555 heads summary:
577 heads summary:
556 total common heads: 1
578 total common heads: 1
557 also local heads: 0
579 also local heads: 0
558 also remote heads: 0
580 also remote heads: 0
581 both: 0
559 local heads: 1
582 local heads: 1
560 common: 0
583 common: 0
561 missing: 1
584 missing: 1
562 remote heads: 1
585 remote heads: 1
563 common: 0
586 common: 0
564 unknown: 1
587 unknown: 1
565 local changesets: 32
588 local changesets: 32
566 common: 2
589 common: 2
567 missing: 30
590 missing: 30
568 common heads: 66f7d451a68b
591 common heads: 66f7d451a68b
569
592
570 % -- b -> a set (tip only)
593 % -- b -> a set (tip only)
571 comparing with a
594 comparing with a
572 query 1; heads
595 query 1; heads
573 searching for changes
596 searching for changes
574 taking quick initial sample
597 taking quick initial sample
575 searching: 2 queries
598 searching: 2 queries
576 query 2; still undecided: 31, sample size is: 31
599 query 2; still undecided: 31, sample size is: 31
577 2 total queries in *.????s (glob)
600 2 total queries in *.????s (glob)
578 elapsed time: * seconds (glob)
601 elapsed time: * seconds (glob)
579 heads summary:
602 heads summary:
580 total common heads: 1
603 total common heads: 1
581 also local heads: 0
604 also local heads: 0
582 also remote heads: 0
605 also remote heads: 0
606 both: 0
583 local heads: 1
607 local heads: 1
584 common: 0
608 common: 0
585 missing: 1
609 missing: 1
586 remote heads: 1
610 remote heads: 1
587 common: 0
611 common: 0
588 unknown: 1
612 unknown: 1
589 local changesets: 32
613 local changesets: 32
590 common: 2
614 common: 2
591 missing: 30
615 missing: 30
592 common heads: 66f7d451a68b
616 common heads: 66f7d451a68b
593
617
594
618
595 Both many new skewed:
619 Both many new skewed:
596
620
597 $ testdesc '-ra' '-rb' '
621 $ testdesc '-ra' '-rb' '
598 > +2:f +30 :b
622 > +2:f +30 :b
599 > <f +50 :a'
623 > <f +50 :a'
600
624
601 % -- a -> b tree
625 % -- a -> b tree
602 comparing with b
626 comparing with b
603 searching for changes
627 searching for changes
604 unpruned common: 66f7d451a68b
628 unpruned common: 66f7d451a68b
605 elapsed time: * seconds (glob)
629 elapsed time: * seconds (glob)
606 heads summary:
630 heads summary:
607 total common heads: 1
631 total common heads: 1
608 also local heads: 0
632 also local heads: 0
609 also remote heads: 0
633 also remote heads: 0
634 both: 0
610 local heads: 1
635 local heads: 1
611 common: 0
636 common: 0
612 missing: 1
637 missing: 1
613 remote heads: 1
638 remote heads: 1
614 common: 0
639 common: 0
615 unknown: 1
640 unknown: 1
616 local changesets: 52
641 local changesets: 52
617 common: 2
642 common: 2
618 missing: 50
643 missing: 50
619 common heads: 66f7d451a68b
644 common heads: 66f7d451a68b
620
645
621 % -- a -> b set
646 % -- a -> b set
622 comparing with b
647 comparing with b
623 query 1; heads
648 query 1; heads
624 searching for changes
649 searching for changes
625 taking quick initial sample
650 taking quick initial sample
626 searching: 2 queries
651 searching: 2 queries
627 query 2; still undecided: 51, sample size is: 51
652 query 2; still undecided: 51, sample size is: 51
628 2 total queries in *.????s (glob)
653 2 total queries in *.????s (glob)
629 elapsed time: * seconds (glob)
654 elapsed time: * seconds (glob)
630 heads summary:
655 heads summary:
631 total common heads: 1
656 total common heads: 1
632 also local heads: 0
657 also local heads: 0
633 also remote heads: 0
658 also remote heads: 0
659 both: 0
634 local heads: 1
660 local heads: 1
635 common: 0
661 common: 0
636 missing: 1
662 missing: 1
637 remote heads: 1
663 remote heads: 1
638 common: 0
664 common: 0
639 unknown: 1
665 unknown: 1
640 local changesets: 52
666 local changesets: 52
641 common: 2
667 common: 2
642 missing: 50
668 missing: 50
643 common heads: 66f7d451a68b
669 common heads: 66f7d451a68b
644
670
645 % -- a -> b set (tip only)
671 % -- a -> b set (tip only)
646 comparing with b
672 comparing with b
647 query 1; heads
673 query 1; heads
648 searching for changes
674 searching for changes
649 taking quick initial sample
675 taking quick initial sample
650 searching: 2 queries
676 searching: 2 queries
651 query 2; still undecided: 51, sample size is: 51
677 query 2; still undecided: 51, sample size is: 51
652 2 total queries in *.????s (glob)
678 2 total queries in *.????s (glob)
653 elapsed time: * seconds (glob)
679 elapsed time: * seconds (glob)
654 heads summary:
680 heads summary:
655 total common heads: 1
681 total common heads: 1
656 also local heads: 0
682 also local heads: 0
657 also remote heads: 0
683 also remote heads: 0
684 both: 0
658 local heads: 1
685 local heads: 1
659 common: 0
686 common: 0
660 missing: 1
687 missing: 1
661 remote heads: 1
688 remote heads: 1
662 common: 0
689 common: 0
663 unknown: 1
690 unknown: 1
664 local changesets: 52
691 local changesets: 52
665 common: 2
692 common: 2
666 missing: 50
693 missing: 50
667 common heads: 66f7d451a68b
694 common heads: 66f7d451a68b
668
695
669 % -- b -> a tree
696 % -- b -> a tree
670 comparing with a
697 comparing with a
671 searching for changes
698 searching for changes
672 unpruned common: 66f7d451a68b
699 unpruned common: 66f7d451a68b
673 elapsed time: * seconds (glob)
700 elapsed time: * seconds (glob)
674 heads summary:
701 heads summary:
675 total common heads: 1
702 total common heads: 1
676 also local heads: 0
703 also local heads: 0
677 also remote heads: 0
704 also remote heads: 0
705 both: 0
678 local heads: 1
706 local heads: 1
679 common: 0
707 common: 0
680 missing: 1
708 missing: 1
681 remote heads: 1
709 remote heads: 1
682 common: 0
710 common: 0
683 unknown: 1
711 unknown: 1
684 local changesets: 32
712 local changesets: 32
685 common: 2
713 common: 2
686 missing: 30
714 missing: 30
687 common heads: 66f7d451a68b
715 common heads: 66f7d451a68b
688
716
689 % -- b -> a set
717 % -- b -> a set
690 comparing with a
718 comparing with a
691 query 1; heads
719 query 1; heads
692 searching for changes
720 searching for changes
693 taking quick initial sample
721 taking quick initial sample
694 searching: 2 queries
722 searching: 2 queries
695 query 2; still undecided: 31, sample size is: 31
723 query 2; still undecided: 31, sample size is: 31
696 2 total queries in *.????s (glob)
724 2 total queries in *.????s (glob)
697 elapsed time: * seconds (glob)
725 elapsed time: * seconds (glob)
698 heads summary:
726 heads summary:
699 total common heads: 1
727 total common heads: 1
700 also local heads: 0
728 also local heads: 0
701 also remote heads: 0
729 also remote heads: 0
730 both: 0
702 local heads: 1
731 local heads: 1
703 common: 0
732 common: 0
704 missing: 1
733 missing: 1
705 remote heads: 1
734 remote heads: 1
706 common: 0
735 common: 0
707 unknown: 1
736 unknown: 1
708 local changesets: 32
737 local changesets: 32
709 common: 2
738 common: 2
710 missing: 30
739 missing: 30
711 common heads: 66f7d451a68b
740 common heads: 66f7d451a68b
712
741
713 % -- b -> a set (tip only)
742 % -- b -> a set (tip only)
714 comparing with a
743 comparing with a
715 query 1; heads
744 query 1; heads
716 searching for changes
745 searching for changes
717 taking quick initial sample
746 taking quick initial sample
718 searching: 2 queries
747 searching: 2 queries
719 query 2; still undecided: 31, sample size is: 31
748 query 2; still undecided: 31, sample size is: 31
720 2 total queries in *.????s (glob)
749 2 total queries in *.????s (glob)
721 elapsed time: * seconds (glob)
750 elapsed time: * seconds (glob)
722 heads summary:
751 heads summary:
723 total common heads: 1
752 total common heads: 1
724 also local heads: 0
753 also local heads: 0
725 also remote heads: 0
754 also remote heads: 0
755 both: 0
726 local heads: 1
756 local heads: 1
727 common: 0
757 common: 0
728 missing: 1
758 missing: 1
729 remote heads: 1
759 remote heads: 1
730 common: 0
760 common: 0
731 unknown: 1
761 unknown: 1
732 local changesets: 32
762 local changesets: 32
733 common: 2
763 common: 2
734 missing: 30
764 missing: 30
735 common heads: 66f7d451a68b
765 common heads: 66f7d451a68b
736
766
737
767
738 Both many new on top of long history:
768 Both many new on top of long history:
739
769
740 $ testdesc '-ra' '-rb' '
770 $ testdesc '-ra' '-rb' '
741 > +1000:f +30 :b
771 > +1000:f +30 :b
742 > <f +50 :a'
772 > <f +50 :a'
743
773
744 % -- a -> b tree
774 % -- a -> b tree
745 comparing with b
775 comparing with b
746 searching for changes
776 searching for changes
747 unpruned common: 7ead0cba2838
777 unpruned common: 7ead0cba2838
748 elapsed time: * seconds (glob)
778 elapsed time: * seconds (glob)
749 heads summary:
779 heads summary:
750 total common heads: 1
780 total common heads: 1
751 also local heads: 0
781 also local heads: 0
752 also remote heads: 0
782 also remote heads: 0
783 both: 0
753 local heads: 1
784 local heads: 1
754 common: 0
785 common: 0
755 missing: 1
786 missing: 1
756 remote heads: 1
787 remote heads: 1
757 common: 0
788 common: 0
758 unknown: 1
789 unknown: 1
759 local changesets: 1050
790 local changesets: 1050
760 common: 1000
791 common: 1000
761 missing: 50
792 missing: 50
762 common heads: 7ead0cba2838
793 common heads: 7ead0cba2838
763
794
764 % -- a -> b set
795 % -- a -> b set
765 comparing with b
796 comparing with b
766 query 1; heads
797 query 1; heads
767 searching for changes
798 searching for changes
768 taking quick initial sample
799 taking quick initial sample
769 searching: 2 queries
800 searching: 2 queries
770 query 2; still undecided: 1049, sample size is: 11
801 query 2; still undecided: 1049, sample size is: 11
771 sampling from both directions
802 sampling from both directions
772 searching: 3 queries
803 searching: 3 queries
773 query 3; still undecided: 31, sample size is: 31
804 query 3; still undecided: 31, sample size is: 31
774 3 total queries in *.????s (glob)
805 3 total queries in *.????s (glob)
775 elapsed time: * seconds (glob)
806 elapsed time: * seconds (glob)
776 heads summary:
807 heads summary:
777 total common heads: 1
808 total common heads: 1
778 also local heads: 0
809 also local heads: 0
779 also remote heads: 0
810 also remote heads: 0
811 both: 0
780 local heads: 1
812 local heads: 1
781 common: 0
813 common: 0
782 missing: 1
814 missing: 1
783 remote heads: 1
815 remote heads: 1
784 common: 0
816 common: 0
785 unknown: 1
817 unknown: 1
786 local changesets: 1050
818 local changesets: 1050
787 common: 1000
819 common: 1000
788 missing: 50
820 missing: 50
789 common heads: 7ead0cba2838
821 common heads: 7ead0cba2838
790
822
791 % -- a -> b set (tip only)
823 % -- a -> b set (tip only)
792 comparing with b
824 comparing with b
793 query 1; heads
825 query 1; heads
794 searching for changes
826 searching for changes
795 taking quick initial sample
827 taking quick initial sample
796 searching: 2 queries
828 searching: 2 queries
797 query 2; still undecided: 1049, sample size is: 11
829 query 2; still undecided: 1049, sample size is: 11
798 sampling from both directions
830 sampling from both directions
799 searching: 3 queries
831 searching: 3 queries
800 query 3; still undecided: 31, sample size is: 31
832 query 3; still undecided: 31, sample size is: 31
801 3 total queries in *.????s (glob)
833 3 total queries in *.????s (glob)
802 elapsed time: * seconds (glob)
834 elapsed time: * seconds (glob)
803 heads summary:
835 heads summary:
804 total common heads: 1
836 total common heads: 1
805 also local heads: 0
837 also local heads: 0
806 also remote heads: 0
838 also remote heads: 0
839 both: 0
807 local heads: 1
840 local heads: 1
808 common: 0
841 common: 0
809 missing: 1
842 missing: 1
810 remote heads: 1
843 remote heads: 1
811 common: 0
844 common: 0
812 unknown: 1
845 unknown: 1
813 local changesets: 1050
846 local changesets: 1050
814 common: 1000
847 common: 1000
815 missing: 50
848 missing: 50
816 common heads: 7ead0cba2838
849 common heads: 7ead0cba2838
817
850
818 % -- b -> a tree
851 % -- b -> a tree
819 comparing with a
852 comparing with a
820 searching for changes
853 searching for changes
821 unpruned common: 7ead0cba2838
854 unpruned common: 7ead0cba2838
822 elapsed time: * seconds (glob)
855 elapsed time: * seconds (glob)
823 heads summary:
856 heads summary:
824 total common heads: 1
857 total common heads: 1
825 also local heads: 0
858 also local heads: 0
826 also remote heads: 0
859 also remote heads: 0
860 both: 0
827 local heads: 1
861 local heads: 1
828 common: 0
862 common: 0
829 missing: 1
863 missing: 1
830 remote heads: 1
864 remote heads: 1
831 common: 0
865 common: 0
832 unknown: 1
866 unknown: 1
833 local changesets: 1030
867 local changesets: 1030
834 common: 1000
868 common: 1000
835 missing: 30
869 missing: 30
836 common heads: 7ead0cba2838
870 common heads: 7ead0cba2838
837
871
838 % -- b -> a set
872 % -- b -> a set
839 comparing with a
873 comparing with a
840 query 1; heads
874 query 1; heads
841 searching for changes
875 searching for changes
842 taking quick initial sample
876 taking quick initial sample
843 searching: 2 queries
877 searching: 2 queries
844 query 2; still undecided: 1029, sample size is: 11
878 query 2; still undecided: 1029, sample size is: 11
845 sampling from both directions
879 sampling from both directions
846 searching: 3 queries
880 searching: 3 queries
847 query 3; still undecided: 15, sample size is: 15
881 query 3; still undecided: 15, sample size is: 15
848 3 total queries in *.????s (glob)
882 3 total queries in *.????s (glob)
849 elapsed time: * seconds (glob)
883 elapsed time: * seconds (glob)
850 heads summary:
884 heads summary:
851 total common heads: 1
885 total common heads: 1
852 also local heads: 0
886 also local heads: 0
853 also remote heads: 0
887 also remote heads: 0
888 both: 0
854 local heads: 1
889 local heads: 1
855 common: 0
890 common: 0
856 missing: 1
891 missing: 1
857 remote heads: 1
892 remote heads: 1
858 common: 0
893 common: 0
859 unknown: 1
894 unknown: 1
860 local changesets: 1030
895 local changesets: 1030
861 common: 1000
896 common: 1000
862 missing: 30
897 missing: 30
863 common heads: 7ead0cba2838
898 common heads: 7ead0cba2838
864
899
865 % -- b -> a set (tip only)
900 % -- b -> a set (tip only)
866 comparing with a
901 comparing with a
867 query 1; heads
902 query 1; heads
868 searching for changes
903 searching for changes
869 taking quick initial sample
904 taking quick initial sample
870 searching: 2 queries
905 searching: 2 queries
871 query 2; still undecided: 1029, sample size is: 11
906 query 2; still undecided: 1029, sample size is: 11
872 sampling from both directions
907 sampling from both directions
873 searching: 3 queries
908 searching: 3 queries
874 query 3; still undecided: 15, sample size is: 15
909 query 3; still undecided: 15, sample size is: 15
875 3 total queries in *.????s (glob)
910 3 total queries in *.????s (glob)
876 elapsed time: * seconds (glob)
911 elapsed time: * seconds (glob)
877 heads summary:
912 heads summary:
878 total common heads: 1
913 total common heads: 1
879 also local heads: 0
914 also local heads: 0
880 also remote heads: 0
915 also remote heads: 0
916 both: 0
881 local heads: 1
917 local heads: 1
882 common: 0
918 common: 0
883 missing: 1
919 missing: 1
884 remote heads: 1
920 remote heads: 1
885 common: 0
921 common: 0
886 unknown: 1
922 unknown: 1
887 local changesets: 1030
923 local changesets: 1030
888 common: 1000
924 common: 1000
889 missing: 30
925 missing: 30
890 common heads: 7ead0cba2838
926 common heads: 7ead0cba2838
891
927
892
928
893 One with >200 heads, which used to use up all of the sample:
929 One with >200 heads, which used to use up all of the sample:
894
930
895 $ hg init manyheads
931 $ hg init manyheads
896 $ cd manyheads
932 $ cd manyheads
897 $ echo "+300:r @a" >dagdesc
933 $ echo "+300:r @a" >dagdesc
898 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
934 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
899 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
935 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
900 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
936 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
901 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
937 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
902 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
938 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
903 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
939 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
904 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
940 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
905 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
941 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
906 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
942 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
907 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
943 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
908 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
944 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
909 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
945 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
910 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
946 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
911 $ echo "@b *r+3" >>dagdesc # one more head
947 $ echo "@b *r+3" >>dagdesc # one more head
912 $ hg debugbuilddag <dagdesc
948 $ hg debugbuilddag <dagdesc
913 reading DAG from stdin
949 reading DAG from stdin
914
950
915 $ hg heads -t --template . | wc -c
951 $ hg heads -t --template . | wc -c
916 \s*261 (re)
952 \s*261 (re)
917
953
918 $ hg clone -b a . a
954 $ hg clone -b a . a
919 adding changesets
955 adding changesets
920 adding manifests
956 adding manifests
921 adding file changes
957 adding file changes
922 added 1340 changesets with 0 changes to 0 files (+259 heads)
958 added 1340 changesets with 0 changes to 0 files (+259 heads)
923 new changesets 1ea73414a91b:1c51e2c80832
959 new changesets 1ea73414a91b:1c51e2c80832
924 updating to branch a
960 updating to branch a
925 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
961 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
926 $ hg clone -b b . b
962 $ hg clone -b b . b
927 adding changesets
963 adding changesets
928 adding manifests
964 adding manifests
929 adding file changes
965 adding file changes
930 added 304 changesets with 0 changes to 0 files
966 added 304 changesets with 0 changes to 0 files
931 new changesets 1ea73414a91b:513314ca8b3a
967 new changesets 1ea73414a91b:513314ca8b3a
932 updating to branch b
968 updating to branch b
933 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
969 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
934
970
935 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
971 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
936 comparing with b
972 comparing with b
937 query 1; heads
973 query 1; heads
938 searching for changes
974 searching for changes
939 taking quick initial sample
975 taking quick initial sample
940 searching: 2 queries
976 searching: 2 queries
941 query 2; still undecided: 1240, sample size is: 100
977 query 2; still undecided: 1240, sample size is: 100
942 sampling from both directions
978 sampling from both directions
943 searching: 3 queries
979 searching: 3 queries
944 query 3; still undecided: 1140, sample size is: 200
980 query 3; still undecided: 1140, sample size is: 200
945 sampling from both directions
981 sampling from both directions
946 searching: 4 queries
982 searching: 4 queries
947 query 4; still undecided: \d+, sample size is: 200 (re)
983 query 4; still undecided: \d+, sample size is: 200 (re)
948 sampling from both directions
984 sampling from both directions
949 searching: 5 queries
985 searching: 5 queries
950 query 5; still undecided: \d+, sample size is: 200 (re)
986 query 5; still undecided: \d+, sample size is: 200 (re)
951 sampling from both directions
987 sampling from both directions
952 searching: 6 queries
988 searching: 6 queries
953 query 6; still undecided: \d+, sample size is: \d+ (re)
989 query 6; still undecided: \d+, sample size is: \d+ (re)
954 6 total queries in *.????s (glob)
990 6 total queries in *.????s (glob)
955 elapsed time: * seconds (glob)
991 elapsed time: * seconds (glob)
956 heads summary:
992 heads summary:
957 total common heads: 1
993 total common heads: 1
958 also local heads: 0
994 also local heads: 0
959 also remote heads: 0
995 also remote heads: 0
996 both: 0
960 local heads: 260
997 local heads: 260
961 common: 0
998 common: 0
962 missing: 260
999 missing: 260
963 remote heads: 1
1000 remote heads: 1
964 common: 0
1001 common: 0
965 unknown: 1
1002 unknown: 1
966 local changesets: 1340
1003 local changesets: 1340
967 common: 300
1004 common: 300
968 missing: 1040
1005 missing: 1040
969 common heads: 3ee37d65064a
1006 common heads: 3ee37d65064a
970 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1007 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
971 comparing with b
1008 comparing with b
972 query 1; heads
1009 query 1; heads
973 searching for changes
1010 searching for changes
974 taking quick initial sample
1011 taking quick initial sample
975 searching: 2 queries
1012 searching: 2 queries
976 query 2; still undecided: 303, sample size is: 9
1013 query 2; still undecided: 303, sample size is: 9
977 sampling from both directions
1014 sampling from both directions
978 searching: 3 queries
1015 searching: 3 queries
979 query 3; still undecided: 3, sample size is: 3
1016 query 3; still undecided: 3, sample size is: 3
980 3 total queries in *.????s (glob)
1017 3 total queries in *.????s (glob)
981 elapsed time: * seconds (glob)
1018 elapsed time: * seconds (glob)
982 heads summary:
1019 heads summary:
983 total common heads: 1
1020 total common heads: 1
984 also local heads: 0
1021 also local heads: 0
985 also remote heads: 0
1022 also remote heads: 0
1023 both: 0
986 local heads: 260
1024 local heads: 260
987 common: 0
1025 common: 0
988 missing: 260
1026 missing: 260
989 remote heads: 1
1027 remote heads: 1
990 common: 0
1028 common: 0
991 unknown: 1
1029 unknown: 1
992 local changesets: 1340
1030 local changesets: 1340
993 common: 300
1031 common: 300
994 missing: 1040
1032 missing: 1040
995 common heads: 3ee37d65064a
1033 common heads: 3ee37d65064a
996
1034
Test actual protocol when pulling one new head in addition to common heads

$ hg clone -U b c
$ hg -R c id -ir tip
513314ca8b3a
$ hg -R c up -qr default
$ touch c/f
$ hg -R c ci -Aqm "extra head"
$ hg -R c id -i
e64a39e7da8b

$ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
$ cat hg.pid >> $DAEMON_PIDS

$ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
comparing with http://localhost:$HGPORT/
searching for changes
e64a39e7da8b

$ killdaemons.py
$ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
"GET /?cmd=capabilities HTTP/1.1" 200 -
"GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
"GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
"GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
$ cat errors.log
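For orientation, each line in access.log is an ordinary HTTP GET against the hgweb command interface: a capabilities probe, a batched heads+known call, the getbundle request, and a phases listkeys lookup. Below is a standalone sketch of replaying the first of those requests from Python; it is not part of the test, and the host and port are assumptions to be substituted for a real 'hg serve' instance:

# Not part of the test: issue the same "?cmd=capabilities" request that the
# client sends first during discovery. The URL is an assumption; adjust host/port.
from urllib.request import urlopen

url = "http://localhost:8000/?cmd=capabilities"
with urlopen(url) as resp:
    caps = resp.read().decode("ascii").split()
print("server advertises %d capabilities" % len(caps))
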

$ cd ..


Issue 4438 - test coverage for 3ef893520a85 issues.

$ mkdir issue4438
$ cd issue4438
#if false
generate new bundles:
$ hg init r1
$ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
$ hg clone -q r1 r2
$ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
$ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
$ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
$ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
#else
use existing bundles:
$ hg init r1
$ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
$ hg -R r1 -q up
$ hg init r2
$ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
$ hg -R r2 -q up
#endif

Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:

$ hg -R r1 outgoing r2 -T'{rev} '
comparing with r2
searching for changes
101 102 103 104 105 106 107 108 109 110 (no-eol)
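The underlying hazard: iterating a Python set yields an arbitrary order, so any discovery step that effectively takes "the first N" items of such an iteration can produce different, and wrong, results from run to run. A minimal standalone illustration, not Mercurial code, using made-up node strings:

# Set iteration order is arbitrary and can vary across Python versions and,
# with string hash randomization, across runs; sort before taking a prefix.
nodes = {"3ee37d65064a", "513314ca8b3a", "e64a39e7da8b"}
print(list(nodes))     # arbitrary order
print(sorted(nodes))   # deterministic order
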

The case where all the 'initialsamplesize' samples already were common would
give 'all remote heads known locally' without checking the remaining heads -
fixed in 86c35b7ae300:

$ cat >> $TESTTMP/unrandomsample.py << EOF
> import random
> def sample(population, k):
>     return sorted(population)[:k]
> random.sample = sample
> EOF

$ cat >> r1/.hg/hgrc << EOF
> [extensions]
> unrandomsample = $TESTTMP/unrandomsample.py
> EOF
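With the extension enabled, every sample that discovery draws through random.sample is simply the k smallest elements rather than a random subset. Assuming, as the test relies on, that the shared heads sort first and the initial sample is no larger than their number, the whole initial sample comes back as common, which is exactly the degenerate case the 86c35b7ae300 fix addresses. A tiny standalone check of what the replacement returns:

# Standalone check mirroring unrandomsample.py above: after the monkeypatch,
# random.sample is deterministic and always returns the k smallest elements.
import random

def sample(population, k):
    return sorted(population)[:k]

random.sample = sample

undecided = list(range(200, 0, -1))   # stand-in for undecided revisions
print(random.sample(undecided, 5))    # [1, 2, 3, 4, 5] on every run
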

$ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
> --config blackbox.track='command commandfinish discovery'
comparing with r2
searching for changes
101 102 103 104 105 106 107 108 109 110 (no-eol)
$ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
$ cd ..