debugdiscovery: display time elapsed during the discovery step...
marmoute
r42202:eec20025 default
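The patch wraps the discovery run in Mercurial's util.timedcm context manager and adds the measured duration to the command's summary output (stored as data['elapsed'] and printed as "elapsed time: ... seconds"). The sketch below illustrates the timing pattern in isolation; the timedcm defined here is a simplified stand-in for Mercurial's helper, and run_discovery is a hypothetical placeholder for the doit(localrevs, remoterevs) call in the hunk further down.

    import contextlib
    import time

    @contextlib.contextmanager
    def timedcm(label):
        # simplified stand-in: yield a stats object and record the
        # wall-clock duration in .elapsed when the block exits
        class _stats(object):
            elapsed = None
        stats = _stats()
        start = time.time()
        try:
            yield stats
        finally:
            stats.elapsed = time.time() - start

    def run_discovery():
        # hypothetical placeholder for the actual discovery step
        time.sleep(0.1)
        return set(), []

    with timedcm('debug-discovery') as t:
        common, hds = run_discovery()
    print('elapsed time: %f seconds' % t.elapsed)

In the real command the value is stored in the data dict and written at the top of the discovery summary, as the diff below shows.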
@@ -1,3462 +1,3465 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from . import (
35 from . import (
36 bundle2,
36 bundle2,
37 changegroup,
37 changegroup,
38 cmdutil,
38 cmdutil,
39 color,
39 color,
40 context,
40 context,
41 copies,
41 copies,
42 dagparser,
42 dagparser,
43 encoding,
43 encoding,
44 error,
44 error,
45 exchange,
45 exchange,
46 extensions,
46 extensions,
47 filemerge,
47 filemerge,
48 filesetlang,
48 filesetlang,
49 formatter,
49 formatter,
50 hg,
50 hg,
51 httppeer,
51 httppeer,
52 localrepo,
52 localrepo,
53 lock as lockmod,
53 lock as lockmod,
54 logcmdutil,
54 logcmdutil,
55 merge as mergemod,
55 merge as mergemod,
56 obsolete,
56 obsolete,
57 obsutil,
57 obsutil,
58 phases,
58 phases,
59 policy,
59 policy,
60 pvec,
60 pvec,
61 pycompat,
61 pycompat,
62 registrar,
62 registrar,
63 repair,
63 repair,
64 revlog,
64 revlog,
65 revset,
65 revset,
66 revsetlang,
66 revsetlang,
67 scmutil,
67 scmutil,
68 setdiscovery,
68 setdiscovery,
69 simplemerge,
69 simplemerge,
70 sshpeer,
70 sshpeer,
71 sslutil,
71 sslutil,
72 streamclone,
72 streamclone,
73 templater,
73 templater,
74 treediscovery,
74 treediscovery,
75 upgrade,
75 upgrade,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 vfs as vfsmod,
78 vfs as vfsmod,
79 wireprotoframing,
79 wireprotoframing,
80 wireprotoserver,
80 wireprotoserver,
81 wireprotov2peer,
81 wireprotov2peer,
82 )
82 )
83 from .utils import (
83 from .utils import (
84 cborutil,
84 cborutil,
85 dateutil,
85 dateutil,
86 procutil,
86 procutil,
87 stringutil,
87 stringutil,
88 )
88 )
89
89
90 from .revlogutils import (
90 from .revlogutils import (
91 deltas as deltautil
91 deltas as deltautil
92 )
92 )
93
93
94 release = lockmod.release
94 release = lockmod.release
95
95
96 command = registrar.command()
96 command = registrar.command()
97
97
98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 def debugancestor(ui, repo, *args):
99 def debugancestor(ui, repo, *args):
100 """find the ancestor revision of two revisions in a given index"""
100 """find the ancestor revision of two revisions in a given index"""
101 if len(args) == 3:
101 if len(args) == 3:
102 index, rev1, rev2 = args
102 index, rev1, rev2 = args
103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 lookup = r.lookup
104 lookup = r.lookup
105 elif len(args) == 2:
105 elif len(args) == 2:
106 if not repo:
106 if not repo:
107 raise error.Abort(_('there is no Mercurial repository here '
107 raise error.Abort(_('there is no Mercurial repository here '
108 '(.hg not found)'))
108 '(.hg not found)'))
109 rev1, rev2 = args
109 rev1, rev2 = args
110 r = repo.changelog
110 r = repo.changelog
111 lookup = repo.lookup
111 lookup = repo.lookup
112 else:
112 else:
113 raise error.Abort(_('either two or three arguments required'))
113 raise error.Abort(_('either two or three arguments required'))
114 a = r.ancestor(lookup(rev1), lookup(rev2))
114 a = r.ancestor(lookup(rev1), lookup(rev2))
115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116
116
117 @command('debugapplystreamclonebundle', [], 'FILE')
117 @command('debugapplystreamclonebundle', [], 'FILE')
118 def debugapplystreamclonebundle(ui, repo, fname):
118 def debugapplystreamclonebundle(ui, repo, fname):
119 """apply a stream clone bundle file"""
119 """apply a stream clone bundle file"""
120 f = hg.openpath(ui, fname)
120 f = hg.openpath(ui, fname)
121 gen = exchange.readbundle(ui, f, fname)
121 gen = exchange.readbundle(ui, f, fname)
122 gen.apply(repo)
122 gen.apply(repo)
123
123
124 @command('debugbuilddag',
124 @command('debugbuilddag',
125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 ('n', 'new-file', None, _('add new file at each rev'))],
127 ('n', 'new-file', None, _('add new file at each rev'))],
128 _('[OPTION]... [TEXT]'))
128 _('[OPTION]... [TEXT]'))
129 def debugbuilddag(ui, repo, text=None,
129 def debugbuilddag(ui, repo, text=None,
130 mergeable_file=False,
130 mergeable_file=False,
131 overwritten_file=False,
131 overwritten_file=False,
132 new_file=False):
132 new_file=False):
133 """builds a repo with a given DAG from scratch in the current empty repo
133 """builds a repo with a given DAG from scratch in the current empty repo
134
134
135 The description of the DAG is read from stdin if not given on the
135 The description of the DAG is read from stdin if not given on the
136 command line.
136 command line.
137
137
138 Elements:
138 Elements:
139
139
140 - "+n" is a linear run of n nodes based on the current default parent
140 - "+n" is a linear run of n nodes based on the current default parent
141 - "." is a single node based on the current default parent
141 - "." is a single node based on the current default parent
142 - "$" resets the default parent to null (implied at the start);
142 - "$" resets the default parent to null (implied at the start);
143 otherwise the default parent is always the last node created
143 otherwise the default parent is always the last node created
144 - "<p" sets the default parent to the backref p
144 - "<p" sets the default parent to the backref p
145 - "*p" is a fork at parent p, which is a backref
145 - "*p" is a fork at parent p, which is a backref
146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 - "/p2" is a merge of the preceding node and p2
147 - "/p2" is a merge of the preceding node and p2
148 - ":tag" defines a local tag for the preceding node
148 - ":tag" defines a local tag for the preceding node
149 - "@branch" sets the named branch for subsequent nodes
149 - "@branch" sets the named branch for subsequent nodes
150 - "#...\\n" is a comment up to the end of the line
150 - "#...\\n" is a comment up to the end of the line
151
151
152 Whitespace between the above elements is ignored.
152 Whitespace between the above elements is ignored.
153
153
154 A backref is either
154 A backref is either
155
155
156 - a number n, which references the node curr-n, where curr is the current
156 - a number n, which references the node curr-n, where curr is the current
157 node, or
157 node, or
158 - the name of a local tag you placed earlier using ":tag", or
158 - the name of a local tag you placed earlier using ":tag", or
159 - empty to denote the default parent.
159 - empty to denote the default parent.
160
160
161 All string valued-elements are either strictly alphanumeric, or must
161 All string valued-elements are either strictly alphanumeric, or must
162 be enclosed in double quotes ("..."), with "\\" as escape character.
162 be enclosed in double quotes ("..."), with "\\" as escape character.
163 """
163 """
164
164
165 if text is None:
165 if text is None:
166 ui.status(_("reading DAG from stdin\n"))
166 ui.status(_("reading DAG from stdin\n"))
167 text = ui.fin.read()
167 text = ui.fin.read()
168
168
169 cl = repo.changelog
169 cl = repo.changelog
170 if len(cl) > 0:
170 if len(cl) > 0:
171 raise error.Abort(_('repository is not empty'))
171 raise error.Abort(_('repository is not empty'))
172
172
173 # determine number of revs in DAG
173 # determine number of revs in DAG
174 total = 0
174 total = 0
175 for type, data in dagparser.parsedag(text):
175 for type, data in dagparser.parsedag(text):
176 if type == 'n':
176 if type == 'n':
177 total += 1
177 total += 1
178
178
179 if mergeable_file:
179 if mergeable_file:
180 linesperrev = 2
180 linesperrev = 2
181 # make a file with k lines per rev
181 # make a file with k lines per rev
182 initialmergedlines = ['%d' % i
182 initialmergedlines = ['%d' % i
183 for i in pycompat.xrange(0, total * linesperrev)]
183 for i in pycompat.xrange(0, total * linesperrev)]
184 initialmergedlines.append("")
184 initialmergedlines.append("")
185
185
186 tags = []
186 tags = []
187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 total=total)
188 total=total)
189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 at = -1
190 at = -1
191 atbranch = 'default'
191 atbranch = 'default'
192 nodeids = []
192 nodeids = []
193 id = 0
193 id = 0
194 progress.update(id)
194 progress.update(id)
195 for type, data in dagparser.parsedag(text):
195 for type, data in dagparser.parsedag(text):
196 if type == 'n':
196 if type == 'n':
197 ui.note(('node %s\n' % pycompat.bytestr(data)))
197 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 id, ps = data
198 id, ps = data
199
199
200 files = []
200 files = []
201 filecontent = {}
201 filecontent = {}
202
202
203 p2 = None
203 p2 = None
204 if mergeable_file:
204 if mergeable_file:
205 fn = "mf"
205 fn = "mf"
206 p1 = repo[ps[0]]
206 p1 = repo[ps[0]]
207 if len(ps) > 1:
207 if len(ps) > 1:
208 p2 = repo[ps[1]]
208 p2 = repo[ps[1]]
209 pa = p1.ancestor(p2)
209 pa = p1.ancestor(p2)
210 base, local, other = [x[fn].data() for x in (pa, p1,
210 base, local, other = [x[fn].data() for x in (pa, p1,
211 p2)]
211 p2)]
212 m3 = simplemerge.Merge3Text(base, local, other)
212 m3 = simplemerge.Merge3Text(base, local, other)
213 ml = [l.strip() for l in m3.merge_lines()]
213 ml = [l.strip() for l in m3.merge_lines()]
214 ml.append("")
214 ml.append("")
215 elif at > 0:
215 elif at > 0:
216 ml = p1[fn].data().split("\n")
216 ml = p1[fn].data().split("\n")
217 else:
217 else:
218 ml = initialmergedlines
218 ml = initialmergedlines
219 ml[id * linesperrev] += " r%i" % id
219 ml[id * linesperrev] += " r%i" % id
220 mergedtext = "\n".join(ml)
220 mergedtext = "\n".join(ml)
221 files.append(fn)
221 files.append(fn)
222 filecontent[fn] = mergedtext
222 filecontent[fn] = mergedtext
223
223
224 if overwritten_file:
224 if overwritten_file:
225 fn = "of"
225 fn = "of"
226 files.append(fn)
226 files.append(fn)
227 filecontent[fn] = "r%i\n" % id
227 filecontent[fn] = "r%i\n" % id
228
228
229 if new_file:
229 if new_file:
230 fn = "nf%i" % id
230 fn = "nf%i" % id
231 files.append(fn)
231 files.append(fn)
232 filecontent[fn] = "r%i\n" % id
232 filecontent[fn] = "r%i\n" % id
233 if len(ps) > 1:
233 if len(ps) > 1:
234 if not p2:
234 if not p2:
235 p2 = repo[ps[1]]
235 p2 = repo[ps[1]]
236 for fn in p2:
236 for fn in p2:
237 if fn.startswith("nf"):
237 if fn.startswith("nf"):
238 files.append(fn)
238 files.append(fn)
239 filecontent[fn] = p2[fn].data()
239 filecontent[fn] = p2[fn].data()
240
240
241 def fctxfn(repo, cx, path):
241 def fctxfn(repo, cx, path):
242 if path in filecontent:
242 if path in filecontent:
243 return context.memfilectx(repo, cx, path,
243 return context.memfilectx(repo, cx, path,
244 filecontent[path])
244 filecontent[path])
245 return None
245 return None
246
246
247 if len(ps) == 0 or ps[0] < 0:
247 if len(ps) == 0 or ps[0] < 0:
248 pars = [None, None]
248 pars = [None, None]
249 elif len(ps) == 1:
249 elif len(ps) == 1:
250 pars = [nodeids[ps[0]], None]
250 pars = [nodeids[ps[0]], None]
251 else:
251 else:
252 pars = [nodeids[p] for p in ps]
252 pars = [nodeids[p] for p in ps]
253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 date=(id, 0),
254 date=(id, 0),
255 user="debugbuilddag",
255 user="debugbuilddag",
256 extra={'branch': atbranch})
256 extra={'branch': atbranch})
257 nodeid = repo.commitctx(cx)
257 nodeid = repo.commitctx(cx)
258 nodeids.append(nodeid)
258 nodeids.append(nodeid)
259 at = id
259 at = id
260 elif type == 'l':
260 elif type == 'l':
261 id, name = data
261 id, name = data
262 ui.note(('tag %s\n' % name))
262 ui.note(('tag %s\n' % name))
263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 elif type == 'a':
264 elif type == 'a':
265 ui.note(('branch %s\n' % data))
265 ui.note(('branch %s\n' % data))
266 atbranch = data
266 atbranch = data
267 progress.update(id)
267 progress.update(id)
268
268
269 if tags:
269 if tags:
270 repo.vfs.write("localtags", "".join(tags))
270 repo.vfs.write("localtags", "".join(tags))
271
271
272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 indent_string = ' ' * indent
273 indent_string = ' ' * indent
274 if all:
274 if all:
275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 % indent_string)
276 % indent_string)
277
277
278 def showchunks(named):
278 def showchunks(named):
279 ui.write("\n%s%s\n" % (indent_string, named))
279 ui.write("\n%s%s\n" % (indent_string, named))
280 for deltadata in gen.deltaiter():
280 for deltadata in gen.deltaiter():
281 node, p1, p2, cs, deltabase, delta, flags = deltadata
281 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 ui.write("%s%s %s %s %s %s %d\n" %
282 ui.write("%s%s %s %s %s %s %d\n" %
283 (indent_string, hex(node), hex(p1), hex(p2),
283 (indent_string, hex(node), hex(p1), hex(p2),
284 hex(cs), hex(deltabase), len(delta)))
284 hex(cs), hex(deltabase), len(delta)))
285
285
286 chunkdata = gen.changelogheader()
286 chunkdata = gen.changelogheader()
287 showchunks("changelog")
287 showchunks("changelog")
288 chunkdata = gen.manifestheader()
288 chunkdata = gen.manifestheader()
289 showchunks("manifest")
289 showchunks("manifest")
290 for chunkdata in iter(gen.filelogheader, {}):
290 for chunkdata in iter(gen.filelogheader, {}):
291 fname = chunkdata['filename']
291 fname = chunkdata['filename']
292 showchunks(fname)
292 showchunks(fname)
293 else:
293 else:
294 if isinstance(gen, bundle2.unbundle20):
294 if isinstance(gen, bundle2.unbundle20):
295 raise error.Abort(_('use debugbundle2 for this file'))
295 raise error.Abort(_('use debugbundle2 for this file'))
296 chunkdata = gen.changelogheader()
296 chunkdata = gen.changelogheader()
297 for deltadata in gen.deltaiter():
297 for deltadata in gen.deltaiter():
298 node, p1, p2, cs, deltabase, delta, flags = deltadata
298 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 ui.write("%s%s\n" % (indent_string, hex(node)))
299 ui.write("%s%s\n" % (indent_string, hex(node)))
300
300
301 def _debugobsmarkers(ui, part, indent=0, **opts):
301 def _debugobsmarkers(ui, part, indent=0, **opts):
302 """display version and markers contained in 'data'"""
302 """display version and markers contained in 'data'"""
303 opts = pycompat.byteskwargs(opts)
303 opts = pycompat.byteskwargs(opts)
304 data = part.read()
304 data = part.read()
305 indent_string = ' ' * indent
305 indent_string = ' ' * indent
306 try:
306 try:
307 version, markers = obsolete._readmarkers(data)
307 version, markers = obsolete._readmarkers(data)
308 except error.UnknownVersion as exc:
308 except error.UnknownVersion as exc:
309 msg = "%sunsupported version: %s (%d bytes)\n"
309 msg = "%sunsupported version: %s (%d bytes)\n"
310 msg %= indent_string, exc.version, len(data)
310 msg %= indent_string, exc.version, len(data)
311 ui.write(msg)
311 ui.write(msg)
312 else:
312 else:
313 msg = "%sversion: %d (%d bytes)\n"
313 msg = "%sversion: %d (%d bytes)\n"
314 msg %= indent_string, version, len(data)
314 msg %= indent_string, version, len(data)
315 ui.write(msg)
315 ui.write(msg)
316 fm = ui.formatter('debugobsolete', opts)
316 fm = ui.formatter('debugobsolete', opts)
317 for rawmarker in sorted(markers):
317 for rawmarker in sorted(markers):
318 m = obsutil.marker(None, rawmarker)
318 m = obsutil.marker(None, rawmarker)
319 fm.startitem()
319 fm.startitem()
320 fm.plain(indent_string)
320 fm.plain(indent_string)
321 cmdutil.showmarker(fm, m)
321 cmdutil.showmarker(fm, m)
322 fm.end()
322 fm.end()
323
323
324 def _debugphaseheads(ui, data, indent=0):
324 def _debugphaseheads(ui, data, indent=0):
325 """display version and markers contained in 'data'"""
325 """display version and markers contained in 'data'"""
326 indent_string = ' ' * indent
326 indent_string = ' ' * indent
327 headsbyphase = phases.binarydecode(data)
327 headsbyphase = phases.binarydecode(data)
328 for phase in phases.allphases:
328 for phase in phases.allphases:
329 for head in headsbyphase[phase]:
329 for head in headsbyphase[phase]:
330 ui.write(indent_string)
330 ui.write(indent_string)
331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332
332
333 def _quasirepr(thing):
333 def _quasirepr(thing):
334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 return '{%s}' % (
335 return '{%s}' % (
336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 return pycompat.bytestr(repr(thing))
337 return pycompat.bytestr(repr(thing))
338
338
339 def _debugbundle2(ui, gen, all=None, **opts):
339 def _debugbundle2(ui, gen, all=None, **opts):
340 """lists the contents of a bundle2"""
340 """lists the contents of a bundle2"""
341 if not isinstance(gen, bundle2.unbundle20):
341 if not isinstance(gen, bundle2.unbundle20):
342 raise error.Abort(_('not a bundle2 file'))
342 raise error.Abort(_('not a bundle2 file'))
343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 parttypes = opts.get(r'part_type', [])
344 parttypes = opts.get(r'part_type', [])
345 for part in gen.iterparts():
345 for part in gen.iterparts():
346 if parttypes and part.type not in parttypes:
346 if parttypes and part.type not in parttypes:
347 continue
347 continue
348 msg = '%s -- %s (mandatory: %r)\n'
348 msg = '%s -- %s (mandatory: %r)\n'
349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 if part.type == 'changegroup':
350 if part.type == 'changegroup':
351 version = part.params.get('version', '01')
351 version = part.params.get('version', '01')
352 cg = changegroup.getunbundler(version, part, 'UN')
352 cg = changegroup.getunbundler(version, part, 'UN')
353 if not ui.quiet:
353 if not ui.quiet:
354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 if part.type == 'obsmarkers':
355 if part.type == 'obsmarkers':
356 if not ui.quiet:
356 if not ui.quiet:
357 _debugobsmarkers(ui, part, indent=4, **opts)
357 _debugobsmarkers(ui, part, indent=4, **opts)
358 if part.type == 'phase-heads':
358 if part.type == 'phase-heads':
359 if not ui.quiet:
359 if not ui.quiet:
360 _debugphaseheads(ui, part, indent=4)
360 _debugphaseheads(ui, part, indent=4)
361
361
362 @command('debugbundle',
362 @command('debugbundle',
363 [('a', 'all', None, _('show all details')),
363 [('a', 'all', None, _('show all details')),
364 ('', 'part-type', [], _('show only the named part type')),
364 ('', 'part-type', [], _('show only the named part type')),
365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 _('FILE'),
366 _('FILE'),
367 norepo=True)
367 norepo=True)
368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 """lists the contents of a bundle"""
369 """lists the contents of a bundle"""
370 with hg.openpath(ui, bundlepath) as f:
370 with hg.openpath(ui, bundlepath) as f:
371 if spec:
371 if spec:
372 spec = exchange.getbundlespec(ui, f)
372 spec = exchange.getbundlespec(ui, f)
373 ui.write('%s\n' % spec)
373 ui.write('%s\n' % spec)
374 return
374 return
375
375
376 gen = exchange.readbundle(ui, f, bundlepath)
376 gen = exchange.readbundle(ui, f, bundlepath)
377 if isinstance(gen, bundle2.unbundle20):
377 if isinstance(gen, bundle2.unbundle20):
378 return _debugbundle2(ui, gen, all=all, **opts)
378 return _debugbundle2(ui, gen, all=all, **opts)
379 _debugchangegroup(ui, gen, all=all, **opts)
379 _debugchangegroup(ui, gen, all=all, **opts)
380
380
381 @command('debugcapabilities',
381 @command('debugcapabilities',
382 [], _('PATH'),
382 [], _('PATH'),
383 norepo=True)
383 norepo=True)
384 def debugcapabilities(ui, path, **opts):
384 def debugcapabilities(ui, path, **opts):
385 """lists the capabilities of a remote peer"""
385 """lists the capabilities of a remote peer"""
386 opts = pycompat.byteskwargs(opts)
386 opts = pycompat.byteskwargs(opts)
387 peer = hg.peer(ui, opts, path)
387 peer = hg.peer(ui, opts, path)
388 caps = peer.capabilities()
388 caps = peer.capabilities()
389 ui.write(('Main capabilities:\n'))
389 ui.write(('Main capabilities:\n'))
390 for c in sorted(caps):
390 for c in sorted(caps):
391 ui.write((' %s\n') % c)
391 ui.write((' %s\n') % c)
392 b2caps = bundle2.bundle2caps(peer)
392 b2caps = bundle2.bundle2caps(peer)
393 if b2caps:
393 if b2caps:
394 ui.write(('Bundle2 capabilities:\n'))
394 ui.write(('Bundle2 capabilities:\n'))
395 for key, values in sorted(b2caps.iteritems()):
395 for key, values in sorted(b2caps.iteritems()):
396 ui.write((' %s\n') % key)
396 ui.write((' %s\n') % key)
397 for v in values:
397 for v in values:
398 ui.write((' %s\n') % v)
398 ui.write((' %s\n') % v)
399
399
400 @command('debugcheckstate', [], '')
400 @command('debugcheckstate', [], '')
401 def debugcheckstate(ui, repo):
401 def debugcheckstate(ui, repo):
402 """validate the correctness of the current dirstate"""
402 """validate the correctness of the current dirstate"""
403 parent1, parent2 = repo.dirstate.parents()
403 parent1, parent2 = repo.dirstate.parents()
404 m1 = repo[parent1].manifest()
404 m1 = repo[parent1].manifest()
405 m2 = repo[parent2].manifest()
405 m2 = repo[parent2].manifest()
406 errors = 0
406 errors = 0
407 for f in repo.dirstate:
407 for f in repo.dirstate:
408 state = repo.dirstate[f]
408 state = repo.dirstate[f]
409 if state in "nr" and f not in m1:
409 if state in "nr" and f not in m1:
410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 errors += 1
411 errors += 1
412 if state in "a" and f in m1:
412 if state in "a" and f in m1:
413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 errors += 1
414 errors += 1
415 if state in "m" and f not in m1 and f not in m2:
415 if state in "m" and f not in m1 and f not in m2:
416 ui.warn(_("%s in state %s, but not in either manifest\n") %
416 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 (f, state))
417 (f, state))
418 errors += 1
418 errors += 1
419 for f in m1:
419 for f in m1:
420 state = repo.dirstate[f]
420 state = repo.dirstate[f]
421 if state not in "nrm":
421 if state not in "nrm":
422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 errors += 1
423 errors += 1
424 if errors:
424 if errors:
425 error = _(".hg/dirstate inconsistent with current parent's manifest")
425 error = _(".hg/dirstate inconsistent with current parent's manifest")
426 raise error.Abort(error)
426 raise error.Abort(error)
427
427
428 @command('debugcolor',
428 @command('debugcolor',
429 [('', 'style', None, _('show all configured styles'))],
429 [('', 'style', None, _('show all configured styles'))],
430 'hg debugcolor')
430 'hg debugcolor')
431 def debugcolor(ui, repo, **opts):
431 def debugcolor(ui, repo, **opts):
432 """show available color, effects or style"""
432 """show available color, effects or style"""
433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 if opts.get(r'style'):
434 if opts.get(r'style'):
435 return _debugdisplaystyle(ui)
435 return _debugdisplaystyle(ui)
436 else:
436 else:
437 return _debugdisplaycolor(ui)
437 return _debugdisplaycolor(ui)
438
438
439 def _debugdisplaycolor(ui):
439 def _debugdisplaycolor(ui):
440 ui = ui.copy()
440 ui = ui.copy()
441 ui._styles.clear()
441 ui._styles.clear()
442 for effect in color._activeeffects(ui).keys():
442 for effect in color._activeeffects(ui).keys():
443 ui._styles[effect] = effect
443 ui._styles[effect] = effect
444 if ui._terminfoparams:
444 if ui._terminfoparams:
445 for k, v in ui.configitems('color'):
445 for k, v in ui.configitems('color'):
446 if k.startswith('color.'):
446 if k.startswith('color.'):
447 ui._styles[k] = k[6:]
447 ui._styles[k] = k[6:]
448 elif k.startswith('terminfo.'):
448 elif k.startswith('terminfo.'):
449 ui._styles[k] = k[9:]
449 ui._styles[k] = k[9:]
450 ui.write(_('available colors:\n'))
450 ui.write(_('available colors:\n'))
451 # sort label with a '_' after the other to group '_background' entry.
451 # sort label with a '_' after the other to group '_background' entry.
452 items = sorted(ui._styles.items(),
452 items = sorted(ui._styles.items(),
453 key=lambda i: ('_' in i[0], i[0], i[1]))
453 key=lambda i: ('_' in i[0], i[0], i[1]))
454 for colorname, label in items:
454 for colorname, label in items:
455 ui.write(('%s\n') % colorname, label=label)
455 ui.write(('%s\n') % colorname, label=label)
456
456
457 def _debugdisplaystyle(ui):
457 def _debugdisplaystyle(ui):
458 ui.write(_('available style:\n'))
458 ui.write(_('available style:\n'))
459 if not ui._styles:
459 if not ui._styles:
460 return
460 return
461 width = max(len(s) for s in ui._styles)
461 width = max(len(s) for s in ui._styles)
462 for label, effects in sorted(ui._styles.items()):
462 for label, effects in sorted(ui._styles.items()):
463 ui.write('%s' % label, label=label)
463 ui.write('%s' % label, label=label)
464 if effects:
464 if effects:
465 # 50
465 # 50
466 ui.write(': ')
466 ui.write(': ')
467 ui.write(' ' * (max(0, width - len(label))))
467 ui.write(' ' * (max(0, width - len(label))))
468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 ui.write('\n')
469 ui.write('\n')
470
470
471 @command('debugcreatestreamclonebundle', [], 'FILE')
471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 def debugcreatestreamclonebundle(ui, repo, fname):
472 def debugcreatestreamclonebundle(ui, repo, fname):
473 """create a stream clone bundle file
473 """create a stream clone bundle file
474
474
475 Stream bundles are special bundles that are essentially archives of
475 Stream bundles are special bundles that are essentially archives of
476 revlog files. They are commonly used for cloning very quickly.
476 revlog files. They are commonly used for cloning very quickly.
477 """
477 """
478 # TODO we may want to turn this into an abort when this functionality
478 # TODO we may want to turn this into an abort when this functionality
479 # is moved into `hg bundle`.
479 # is moved into `hg bundle`.
480 if phases.hassecret(repo):
480 if phases.hassecret(repo):
481 ui.warn(_('(warning: stream clone bundle will contain secret '
481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 'revisions)\n'))
482 'revisions)\n'))
483
483
484 requirements, gen = streamclone.generatebundlev1(repo)
484 requirements, gen = streamclone.generatebundlev1(repo)
485 changegroup.writechunks(ui, gen, fname)
485 changegroup.writechunks(ui, gen, fname)
486
486
487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488
488
489 @command('debugdag',
489 @command('debugdag',
490 [('t', 'tags', None, _('use tags as labels')),
490 [('t', 'tags', None, _('use tags as labels')),
491 ('b', 'branches', None, _('annotate with branch names')),
491 ('b', 'branches', None, _('annotate with branch names')),
492 ('', 'dots', None, _('use dots for runs')),
492 ('', 'dots', None, _('use dots for runs')),
493 ('s', 'spaces', None, _('separate elements by spaces'))],
493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 _('[OPTION]... [FILE [REV]...]'),
494 _('[OPTION]... [FILE [REV]...]'),
495 optionalrepo=True)
495 optionalrepo=True)
496 def debugdag(ui, repo, file_=None, *revs, **opts):
496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 """format the changelog or an index DAG as a concise textual description
497 """format the changelog or an index DAG as a concise textual description
498
498
499 If you pass a revlog index, the revlog's DAG is emitted. If you list
499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 revision numbers, they get labeled in the output as rN.
500 revision numbers, they get labeled in the output as rN.
501
501
502 Otherwise, the changelog DAG of the current repo is emitted.
502 Otherwise, the changelog DAG of the current repo is emitted.
503 """
503 """
504 spaces = opts.get(r'spaces')
504 spaces = opts.get(r'spaces')
505 dots = opts.get(r'dots')
505 dots = opts.get(r'dots')
506 if file_:
506 if file_:
507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 file_)
508 file_)
509 revs = set((int(r) for r in revs))
509 revs = set((int(r) for r in revs))
510 def events():
510 def events():
511 for r in rlog:
511 for r in rlog:
512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 if p != -1))
513 if p != -1))
514 if r in revs:
514 if r in revs:
515 yield 'l', (r, "r%i" % r)
515 yield 'l', (r, "r%i" % r)
516 elif repo:
516 elif repo:
517 cl = repo.changelog
517 cl = repo.changelog
518 tags = opts.get(r'tags')
518 tags = opts.get(r'tags')
519 branches = opts.get(r'branches')
519 branches = opts.get(r'branches')
520 if tags:
520 if tags:
521 labels = {}
521 labels = {}
522 for l, n in repo.tags().items():
522 for l, n in repo.tags().items():
523 labels.setdefault(cl.rev(n), []).append(l)
523 labels.setdefault(cl.rev(n), []).append(l)
524 def events():
524 def events():
525 b = "default"
525 b = "default"
526 for r in cl:
526 for r in cl:
527 if branches:
527 if branches:
528 newb = cl.read(cl.node(r))[5]['branch']
528 newb = cl.read(cl.node(r))[5]['branch']
529 if newb != b:
529 if newb != b:
530 yield 'a', newb
530 yield 'a', newb
531 b = newb
531 b = newb
532 yield 'n', (r, list(p for p in cl.parentrevs(r)
532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 if p != -1))
533 if p != -1))
534 if tags:
534 if tags:
535 ls = labels.get(r)
535 ls = labels.get(r)
536 if ls:
536 if ls:
537 for l in ls:
537 for l in ls:
538 yield 'l', (r, l)
538 yield 'l', (r, l)
539 else:
539 else:
540 raise error.Abort(_('need repo for changelog dag'))
540 raise error.Abort(_('need repo for changelog dag'))
541
541
542 for line in dagparser.dagtextlines(events(),
542 for line in dagparser.dagtextlines(events(),
543 addspaces=spaces,
543 addspaces=spaces,
544 wraplabels=True,
544 wraplabels=True,
545 wrapannotations=True,
545 wrapannotations=True,
546 wrapnonlinear=dots,
546 wrapnonlinear=dots,
547 usedots=dots,
547 usedots=dots,
548 maxlinewidth=70):
548 maxlinewidth=70):
549 ui.write(line)
549 ui.write(line)
550 ui.write("\n")
550 ui.write("\n")
551
551
552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 def debugdata(ui, repo, file_, rev=None, **opts):
553 def debugdata(ui, repo, file_, rev=None, **opts):
554 """dump the contents of a data file revision"""
554 """dump the contents of a data file revision"""
555 opts = pycompat.byteskwargs(opts)
555 opts = pycompat.byteskwargs(opts)
556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 if rev is not None:
557 if rev is not None:
558 raise error.CommandError('debugdata', _('invalid arguments'))
558 raise error.CommandError('debugdata', _('invalid arguments'))
559 file_, rev = None, file_
559 file_, rev = None, file_
560 elif rev is None:
560 elif rev is None:
561 raise error.CommandError('debugdata', _('invalid arguments'))
561 raise error.CommandError('debugdata', _('invalid arguments'))
562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 try:
563 try:
564 ui.write(r.revision(r.lookup(rev), raw=True))
564 ui.write(r.revision(r.lookup(rev), raw=True))
565 except KeyError:
565 except KeyError:
566 raise error.Abort(_('invalid revision identifier %s') % rev)
566 raise error.Abort(_('invalid revision identifier %s') % rev)
567
567
568 @command('debugdate',
568 @command('debugdate',
569 [('e', 'extended', None, _('try extended date formats'))],
569 [('e', 'extended', None, _('try extended date formats'))],
570 _('[-e] DATE [RANGE]'),
570 _('[-e] DATE [RANGE]'),
571 norepo=True, optionalrepo=True)
571 norepo=True, optionalrepo=True)
572 def debugdate(ui, date, range=None, **opts):
572 def debugdate(ui, date, range=None, **opts):
573 """parse and display a date"""
573 """parse and display a date"""
574 if opts[r"extended"]:
574 if opts[r"extended"]:
575 d = dateutil.parsedate(date, util.extendeddateformats)
575 d = dateutil.parsedate(date, util.extendeddateformats)
576 else:
576 else:
577 d = dateutil.parsedate(date)
577 d = dateutil.parsedate(date)
578 ui.write(("internal: %d %d\n") % d)
578 ui.write(("internal: %d %d\n") % d)
579 ui.write(("standard: %s\n") % dateutil.datestr(d))
579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 if range:
580 if range:
581 m = dateutil.matchdate(range)
581 m = dateutil.matchdate(range)
582 ui.write(("match: %s\n") % m(d[0]))
582 ui.write(("match: %s\n") % m(d[0]))
583
583
584 @command('debugdeltachain',
584 @command('debugdeltachain',
585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 _('-c|-m|FILE'),
586 _('-c|-m|FILE'),
587 optionalrepo=True)
587 optionalrepo=True)
588 def debugdeltachain(ui, repo, file_=None, **opts):
588 def debugdeltachain(ui, repo, file_=None, **opts):
589 """dump information about delta chains in a revlog
589 """dump information about delta chains in a revlog
590
590
591 Output can be templatized. Available template keywords are:
591 Output can be templatized. Available template keywords are:
592
592
593 :``rev``: revision number
593 :``rev``: revision number
594 :``chainid``: delta chain identifier (numbered by unique base)
594 :``chainid``: delta chain identifier (numbered by unique base)
595 :``chainlen``: delta chain length to this revision
595 :``chainlen``: delta chain length to this revision
596 :``prevrev``: previous revision in delta chain
596 :``prevrev``: previous revision in delta chain
597 :``deltatype``: role of delta / how it was computed
597 :``deltatype``: role of delta / how it was computed
598 :``compsize``: compressed size of revision
598 :``compsize``: compressed size of revision
599 :``uncompsize``: uncompressed size of revision
599 :``uncompsize``: uncompressed size of revision
600 :``chainsize``: total size of compressed revisions in chain
600 :``chainsize``: total size of compressed revisions in chain
601 :``chainratio``: total chain size divided by uncompressed revision size
601 :``chainratio``: total chain size divided by uncompressed revision size
602 (new delta chains typically start at ratio 2.00)
602 (new delta chains typically start at ratio 2.00)
603 :``lindist``: linear distance from base revision in delta chain to end
603 :``lindist``: linear distance from base revision in delta chain to end
604 of this revision
604 of this revision
605 :``extradist``: total size of revisions not part of this delta chain from
605 :``extradist``: total size of revisions not part of this delta chain from
606 base of delta chain to end of this revision; a measurement
606 base of delta chain to end of this revision; a measurement
607 of how much extra data we need to read/seek across to read
607 of how much extra data we need to read/seek across to read
608 the delta chain for this revision
608 the delta chain for this revision
609 :``extraratio``: extradist divided by chainsize; another representation of
609 :``extraratio``: extradist divided by chainsize; another representation of
610 how much unrelated data is needed to load this delta chain
610 how much unrelated data is needed to load this delta chain
611
611
612 If the repository is configured to use the sparse read, additional keywords
612 If the repository is configured to use the sparse read, additional keywords
613 are available:
613 are available:
614
614
615 :``readsize``: total size of data read from the disk for a revision
615 :``readsize``: total size of data read from the disk for a revision
616 (sum of the sizes of all the blocks)
616 (sum of the sizes of all the blocks)
617 :``largestblock``: size of the largest block of data read from the disk
617 :``largestblock``: size of the largest block of data read from the disk
618 :``readdensity``: density of useful bytes in the data read from the disk
618 :``readdensity``: density of useful bytes in the data read from the disk
619 :``srchunks``: in how many data hunks the whole revision would be read
619 :``srchunks``: in how many data hunks the whole revision would be read
620
620
621 The sparse read can be enabled with experimental.sparse-read = True
621 The sparse read can be enabled with experimental.sparse-read = True
622 """
622 """
623 opts = pycompat.byteskwargs(opts)
623 opts = pycompat.byteskwargs(opts)
624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 index = r.index
625 index = r.index
626 start = r.start
626 start = r.start
627 length = r.length
627 length = r.length
628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 withsparseread = getattr(r, '_withsparseread', False)
629 withsparseread = getattr(r, '_withsparseread', False)
630
630
631 def revinfo(rev):
631 def revinfo(rev):
632 e = index[rev]
632 e = index[rev]
633 compsize = e[1]
633 compsize = e[1]
634 uncompsize = e[2]
634 uncompsize = e[2]
635 chainsize = 0
635 chainsize = 0
636
636
637 if generaldelta:
637 if generaldelta:
638 if e[3] == e[5]:
638 if e[3] == e[5]:
639 deltatype = 'p1'
639 deltatype = 'p1'
640 elif e[3] == e[6]:
640 elif e[3] == e[6]:
641 deltatype = 'p2'
641 deltatype = 'p2'
642 elif e[3] == rev - 1:
642 elif e[3] == rev - 1:
643 deltatype = 'prev'
643 deltatype = 'prev'
644 elif e[3] == rev:
644 elif e[3] == rev:
645 deltatype = 'base'
645 deltatype = 'base'
646 else:
646 else:
647 deltatype = 'other'
647 deltatype = 'other'
648 else:
648 else:
649 if e[3] == rev:
649 if e[3] == rev:
650 deltatype = 'base'
650 deltatype = 'base'
651 else:
651 else:
652 deltatype = 'prev'
652 deltatype = 'prev'
653
653
654 chain = r._deltachain(rev)[0]
654 chain = r._deltachain(rev)[0]
655 for iterrev in chain:
655 for iterrev in chain:
656 e = index[iterrev]
656 e = index[iterrev]
657 chainsize += e[1]
657 chainsize += e[1]
658
658
659 return compsize, uncompsize, deltatype, chain, chainsize
659 return compsize, uncompsize, deltatype, chain, chainsize
660
660
661 fm = ui.formatter('debugdeltachain', opts)
661 fm = ui.formatter('debugdeltachain', opts)
662
662
663 fm.plain(' rev chain# chainlen prev delta '
663 fm.plain(' rev chain# chainlen prev delta '
664 'size rawsize chainsize ratio lindist extradist '
664 'size rawsize chainsize ratio lindist extradist '
665 'extraratio')
665 'extraratio')
666 if withsparseread:
666 if withsparseread:
667 fm.plain(' readsize largestblk rddensity srchunks')
667 fm.plain(' readsize largestblk rddensity srchunks')
668 fm.plain('\n')
668 fm.plain('\n')
669
669
670 chainbases = {}
670 chainbases = {}
671 for rev in r:
671 for rev in r:
672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 chainbase = chain[0]
673 chainbase = chain[0]
674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 basestart = start(chainbase)
675 basestart = start(chainbase)
676 revstart = start(rev)
676 revstart = start(rev)
677 lineardist = revstart + comp - basestart
677 lineardist = revstart + comp - basestart
678 extradist = lineardist - chainsize
678 extradist = lineardist - chainsize
679 try:
679 try:
680 prevrev = chain[-2]
680 prevrev = chain[-2]
681 except IndexError:
681 except IndexError:
682 prevrev = -1
682 prevrev = -1
683
683
684 if uncomp != 0:
684 if uncomp != 0:
685 chainratio = float(chainsize) / float(uncomp)
685 chainratio = float(chainsize) / float(uncomp)
686 else:
686 else:
687 chainratio = chainsize
687 chainratio = chainsize
688
688
689 if chainsize != 0:
689 if chainsize != 0:
690 extraratio = float(extradist) / float(chainsize)
690 extraratio = float(extradist) / float(chainsize)
691 else:
691 else:
692 extraratio = extradist
692 extraratio = extradist
693
693
694 fm.startitem()
694 fm.startitem()
695 fm.write('rev chainid chainlen prevrev deltatype compsize '
695 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 'uncompsize chainsize chainratio lindist extradist '
696 'uncompsize chainsize chainratio lindist extradist '
697 'extraratio',
697 'extraratio',
698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 rev, chainid, len(chain), prevrev, deltatype, comp,
699 rev, chainid, len(chain), prevrev, deltatype, comp,
700 uncomp, chainsize, chainratio, lineardist, extradist,
700 uncomp, chainsize, chainratio, lineardist, extradist,
701 extraratio,
701 extraratio,
702 rev=rev, chainid=chainid, chainlen=len(chain),
702 rev=rev, chainid=chainid, chainlen=len(chain),
703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 uncompsize=uncomp, chainsize=chainsize,
704 uncompsize=uncomp, chainsize=chainsize,
705 chainratio=chainratio, lindist=lineardist,
705 chainratio=chainratio, lindist=lineardist,
706 extradist=extradist, extraratio=extraratio)
706 extradist=extradist, extraratio=extraratio)
707 if withsparseread:
707 if withsparseread:
708 readsize = 0
708 readsize = 0
709 largestblock = 0
709 largestblock = 0
710 srchunks = 0
710 srchunks = 0
711
711
712 for revschunk in deltautil.slicechunk(r, chain):
712 for revschunk in deltautil.slicechunk(r, chain):
713 srchunks += 1
713 srchunks += 1
714 blkend = start(revschunk[-1]) + length(revschunk[-1])
714 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 blksize = blkend - start(revschunk[0])
715 blksize = blkend - start(revschunk[0])
716
716
717 readsize += blksize
717 readsize += blksize
718 if largestblock < blksize:
718 if largestblock < blksize:
719 largestblock = blksize
719 largestblock = blksize
720
720
721 if readsize:
721 if readsize:
722 readdensity = float(chainsize) / float(readsize)
722 readdensity = float(chainsize) / float(readsize)
723 else:
723 else:
724 readdensity = 1
724 readdensity = 1
725
725
726 fm.write('readsize largestblock readdensity srchunks',
726 fm.write('readsize largestblock readdensity srchunks',
727 ' %10d %10d %9.5f %8d',
727 ' %10d %10d %9.5f %8d',
728 readsize, largestblock, readdensity, srchunks,
728 readsize, largestblock, readdensity, srchunks,
729 readsize=readsize, largestblock=largestblock,
729 readsize=readsize, largestblock=largestblock,
730 readdensity=readdensity, srchunks=srchunks)
730 readdensity=readdensity, srchunks=srchunks)
731
731
732 fm.plain('\n')
732 fm.plain('\n')
733
733
734 fm.end()
734 fm.end()
735
735
736 @command('debugdirstate|debugstate',
736 @command('debugdirstate|debugstate',
737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 ('', 'dates', True, _('display the saved mtime')),
738 ('', 'dates', True, _('display the saved mtime')),
739 ('', 'datesort', None, _('sort by saved mtime'))],
739 ('', 'datesort', None, _('sort by saved mtime'))],
740 _('[OPTION]...'))
740 _('[OPTION]...'))
741 def debugstate(ui, repo, **opts):
741 def debugstate(ui, repo, **opts):
742 """show the contents of the current dirstate"""
742 """show the contents of the current dirstate"""
743
743
744 nodates = not opts[r'dates']
744 nodates = not opts[r'dates']
745 if opts.get(r'nodates') is not None:
745 if opts.get(r'nodates') is not None:
746 nodates = True
746 nodates = True
747 datesort = opts.get(r'datesort')
747 datesort = opts.get(r'datesort')
748
748
749 if datesort:
749 if datesort:
750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 else:
751 else:
752 keyfunc = None # sort by filename
752 keyfunc = None # sort by filename
753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 if ent[3] == -1:
754 if ent[3] == -1:
755 timestr = 'unset '
755 timestr = 'unset '
756 elif nodates:
756 elif nodates:
757 timestr = 'set '
757 timestr = 'set '
758 else:
758 else:
759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 time.localtime(ent[3]))
760 time.localtime(ent[3]))
761 timestr = encoding.strtolocal(timestr)
761 timestr = encoding.strtolocal(timestr)
762 if ent[1] & 0o20000:
762 if ent[1] & 0o20000:
763 mode = 'lnk'
763 mode = 'lnk'
764 else:
764 else:
765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 for f in repo.dirstate.copies():
767 for f in repo.dirstate.copies():
768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769
769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
     ('', 'seed', '12323', 'specify the random seed use for discovery'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))



    if opts.get('old'):
        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
-    common, hds = doit(localrevs, remoterevs)
+    with util.timedcm('debug-discovery') as t:
+        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
+    data['elapsed'] = t.elapsed
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
+    ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
    ui.write(("heads summary:\n"))
    ui.write((" total common heads: %(nb-common)9d\n") % data)
    ui.write((" also local heads: %(nb-common-local)9d\n") % data)
    ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write((" local heads: %(nb-local)9d\n") % data)
    ui.write((" common: %(nb-common-local)9d\n") % data)
    ui.write((" missing: %(nb-local-missing)9d\n") % data)
    ui.write((" remote heads: %(nb-remote)9d\n") % data)
    ui.write((" common: %(nb-common-remote)9d\n") % data)
    ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets: %(nb-revs)9d\n") % data)
    ui.write((" common: %(nb-revs-common)9d\n") % data)
    ui.write((" missing: %(nb-revs-missing)9d\n") % data)

    if ui.verbose:
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))

856 _chunksize = 4 << 10
859 _chunksize = 4 << 10
857
860
858 @command('debugdownload',
861 @command('debugdownload',
859 [
862 [
860 ('o', 'output', '', _('path')),
863 ('o', 'output', '', _('path')),
861 ],
864 ],
862 optionalrepo=True)
865 optionalrepo=True)
863 def debugdownload(ui, repo, url, output=None, **opts):
866 def debugdownload(ui, repo, url, output=None, **opts):
864 """download a resource using Mercurial logic and config
867 """download a resource using Mercurial logic and config
865 """
868 """
866 fh = urlmod.open(ui, url, output)
869 fh = urlmod.open(ui, url, output)
867
870
868 dest = ui
871 dest = ui
869 if output:
872 if output:
870 dest = open(output, "wb", _chunksize)
873 dest = open(output, "wb", _chunksize)
871 try:
874 try:
872 data = fh.read(_chunksize)
875 data = fh.read(_chunksize)
873 while data:
876 while data:
874 dest.write(data)
877 dest.write(data)
875 data = fh.read(_chunksize)
878 data = fh.read(_chunksize)
876 finally:
879 finally:
877 if output:
880 if output:
878 dest.close()
881 dest.close()
879
882
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

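# Editor's note (illustrative usage, not part of the original file): since
# the command registers cmdutil.formatteropts, its output can be templated:
#
#   hg debugextensions            # one extension name per line
#   hg debugextensions -v         # adds location / bundled / tested-with info
#   hg debugextensions -T json    # machine-readable output via the formatter
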
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)

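# Editor's note (illustrative usage, not part of the original file): the
# stages defined above ('parsed', 'analyzed', 'optimized') can be dumped
# individually or all at once with -p/--show-stage:
#
#   hg debugfileset -p all 'added() and size(">1k")'
#   hg debugfileset -r . -s 'binary()'    # -s also prints the matcher
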
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()

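# Editor's note (illustrative, not part of the original file): in plain mode
# the loop above prints one aligned row per format variant, e.g.
#
#   format-variant    repo
#   fncache:           yes
#   dotencode:         yes
#
# and with --verbose two extra columns ('config' and 'default') are added.
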
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

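# Editor's note (illustrative usage, not part of the original file): REPO is
# a peer path and FILE the output bundle; -t picks one of the btypes keys
# defined above (none, bzip2, gzip, bundle2):
#
#   hg debuggetbundle http://example.com/repo out.hg -t bundle2
#   hg debuggetbundle ../other out.hg -H <40-hex-node> -C <40-hex-node>
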
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (uipathfn(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % uipathfn(f))

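# Editor's note (illustrative, not part of the original file): sample runs
# through the branches above for a tracked vs. an ignored path (values are
# hypothetical, the message formats come from the code):
#
#   $ hg debugignore src/main.py
#   src/main.py is not ignored
#   $ hg debugignore build/out.o
#   build/out.o is ignored
#   (ignore rule in .hgignore, line 3: 'build')
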
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()

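# Editor's note (illustrative usage, not part of the original file): the
# storage primitive is selected via debugrevlogopts (-c changelog,
# -m manifest) or a file path, and the header/row writes above yield:
#
#   $ hg debugindex -c
#      rev linkrev nodeid       p1           p2
#        0       0 <12-hex>     000000000000 000000000000
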
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write('%s: %d\n' % (k, v))

@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

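# Editor's note (illustrative, not part of the original file): each check
# above increments 'problems' on failure, and that count is also the return
# value, so the exit status reflects how many checks failed:
#
#   hg debuginstall           # human-readable checklist
#   hg debuginstall -T json   # the same data via the formatter
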
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

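# Editor's note (illustrative usage, not part of the original file): the
# peer's known() result is printed as one digit per queried node, e.g.
#
#   $ hg debugknown ../other <known-40-hex-node> <unknown-40-hex-node>
#   10
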
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = ('user %s, process %s, host %s'
                                  % (user or b'None', pid, host))
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

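# Editor's note (illustrative, not part of the original file): report() above
# prints one line per lock, so a typical run looks like
#
#   $ hg debuglocks
#   lock:  free
#   wlock: user alice, process 12345 (3s)
#
# and the return value is the number of locks currently held.
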
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', [], _('add the given manifest nodes to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _("Current revlog implementation doesn't appear to have a "
                    "manifest fulltext cache\n")
            raise error.Abort(msg)

    if opts.get(r'clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint="Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    cache = getcache()
    if not len(cache):
        ui.write(_('cache empty\n'))
    else:
        ui.write(
            _('cache contains %d manifest entries, in order of most to '
              'least recent:\n') % (len(cache),))
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24   # 20 bytes nodeid, 4 bytes size
            ui.write(_('id: %s, size %s\n') % (
                hex(nodeid), util.bytecount(size)))
        ondisk = cache._opener.stat('manifestfulltextcache').st_size
        ui.write(
            _('total cache data size %s, on-disk %s\n') % (
                util.bytecount(totalsize), util.bytecount(ondisk))
        )

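# Editor's note (illustrative usage, not part of the original file): the three
# code paths above map onto the flags defined in the decorator:
#
#   hg debugmanifestfulltextcache                     # list cached entries
#   hg debugmanifestfulltextcache --clear             # drop persisted data
#   hg debugmanifestfulltextcache -a <manifest-node>  # pre-populate one entry
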
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
1738 raise error.Abort('cannot used --record-parents on '
1741 raise error.Abort('cannot used --record-parents on '
1739 'unknown changesets')
1742 'unknown changesets')
1740 parents = repo.unfiltered()[prec].parents()
1743 parents = repo.unfiltered()[prec].parents()
1741 parents = tuple(p.node() for p in parents)
1744 parents = tuple(p.node() for p in parents)
1742 repo.obsstore.create(tr, prec, succs, opts['flags'],
1745 repo.obsstore.create(tr, prec, succs, opts['flags'],
1743 parents=parents, date=date,
1746 parents=parents, date=date,
1744 metadata=metadata, ui=ui)
1747 metadata=metadata, ui=ui)
1745 tr.close()
1748 tr.close()
1746 except ValueError as exc:
1749 except ValueError as exc:
1747 raise error.Abort(_('bad obsmarker input: %s') %
1750 raise error.Abort(_('bad obsmarker input: %s') %
1748 pycompat.bytestr(exc))
1751 pycompat.bytestr(exc))
1749 finally:
1752 finally:
1750 tr.release()
1753 tr.release()
1751 finally:
1754 finally:
1752 l.release()
1755 l.release()
1753 else:
1756 else:
1754 if opts['rev']:
1757 if opts['rev']:
1755 revs = scmutil.revrange(repo, opts['rev'])
1758 revs = scmutil.revrange(repo, opts['rev'])
1756 nodes = [repo[r].node() for r in revs]
1759 nodes = [repo[r].node() for r in revs]
1757 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1760 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1758 exclusive=opts['exclusive']))
1761 exclusive=opts['exclusive']))
1759 markers.sort(key=lambda x: x._data)
1762 markers.sort(key=lambda x: x._data)
1760 else:
1763 else:
1761 markers = obsutil.getmarkers(repo)
1764 markers = obsutil.getmarkers(repo)
1762
1765
1763 markerstoiter = markers
1766 markerstoiter = markers
1764 isrelevant = lambda m: True
1767 isrelevant = lambda m: True
1765 if opts.get('rev') and opts.get('index'):
1768 if opts.get('rev') and opts.get('index'):
1766 markerstoiter = obsutil.getmarkers(repo)
1769 markerstoiter = obsutil.getmarkers(repo)
1767 markerset = set(markers)
1770 markerset = set(markers)
1768 isrelevant = lambda m: m in markerset
1771 isrelevant = lambda m: m in markerset
1769
1772
1770 fm = ui.formatter('debugobsolete', opts)
1773 fm = ui.formatter('debugobsolete', opts)
1771 for i, m in enumerate(markerstoiter):
1774 for i, m in enumerate(markerstoiter):
1772 if not isrelevant(m):
1775 if not isrelevant(m):
1773 # marker can be irrelevant when we're iterating over a set
1776 # marker can be irrelevant when we're iterating over a set
1774 # of markers (markerstoiter) which is bigger than the set
1777 # of markers (markerstoiter) which is bigger than the set
1775 # of markers we want to display (markers)
1778 # of markers we want to display (markers)
1776 # this can happen if both --index and --rev options are
1779 # this can happen if both --index and --rev options are
1777 # provided and thus we need to iterate over all of the markers
1780 # provided and thus we need to iterate over all of the markers
1778 # to get the correct indices, but only display the ones that
1781 # to get the correct indices, but only display the ones that
1779 # are relevant to --rev value
1782 # are relevant to --rev value
1780 continue
1783 continue
1781 fm.startitem()
1784 fm.startitem()
1782 ind = i if opts.get('index') else None
1785 ind = i if opts.get('index') else None
1783 cmdutil.showmarker(fm, m, index=ind)
1786 cmdutil.showmarker(fm, m, index=ind)
1784 fm.end()
1787 fm.end()
1785
1788
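# Illustrative invocations of the command above (the node hashes and the
# index value are placeholders):
#
#   $ hg debugobsolete                        # list every obsolescence marker
#   $ hg debugobsolete OLDNODE NEWNODE        # record OLDNODE as obsoleted by NEWNODE
#   $ hg debugobsolete --rev . --index        # markers relevant to the working parent, with indices
#   $ hg debugobsolete --delete 0             # drop the marker stored at index 0
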
@command('debugp1copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write('%s -> %s\n' % (src, dst))

@command('debugp2copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write('%s -> %s\n' % (src, dst))

@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

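# Sketch of how the completion helper above behaves (the paths are
# hypothetical and depend on what the dirstate tracks):
#
#   $ hg debugpathcomplete sr
#   src
#   $ hg debugpathcomplete --full src/
#   src/main.py
#   src/util.py
#
# Without --full the output stops at the next path segment; with it, entire
# tracked paths under the prefix are listed.
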
@command('debugpathcopies',
         cmdutil.walkopts,
         'hg debugpathcopies REV1 REV2 [FILE]',
         inferrepo=True)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write('%s -> %s\n' % (src, dst))

@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

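# Example session for the command above (the URL and the reported answers are
# placeholders); the three output lines mirror the format strings used above:
#
#   $ hg debugpeer ssh://example.com/repo
#   url: ssh://example.com/repo
#   local: no
#   pushable: yes
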
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    the configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows the configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If the merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such cases, the information above is
    useful to understand why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

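# Example of the command above; the file names are placeholders and the
# output follows the "FILE = MERGETOOL" style described in the docstring:
#
#   $ hg debugpickmergetool --tool :merge3 'glob:**.c'
#   driver.c = :merge3
#   util.c = :merge3
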
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))

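# Illustrative calls for the command above (the repository path, key name and
# node hashes are placeholders; 'bookmarks' is assumed to be an available
# pushkey namespace on the target):
#
#   $ hg debugpushkey /path/to/repo bookmarks                     # two args: list keys
#   $ hg debugpushkey /path/to/repo bookmarks feature OLD NEW     # five args: update 'feature' if it still equals OLD
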
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

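# Typical invocations of the command above:
#
#   $ hg debugrebuilddirstate              # rebuild from the first working copy parent
#   $ hg debugrebuilddirstate -r REV       # rebuild as of REV
#   $ hg debugrebuilddirstate --minimal    # only fix entries inconsistent with the parent
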
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

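# Example output of the command above (the file names and the filelog hash
# are placeholders); the two message forms match the format strings above:
#
#   $ hg debugrename -r . copied.txt original.txt
#   copied.txt renamed from original.txt:6c5ef07b0b24...
#   original.txt not renamed
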
@command('debugrevlog', cmdutil.debugrevlogopts +
         [('d', 'dump', False, _('dump index data'))],
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each rev
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

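# Sketch of a run of the command above against the manifest revlog; the
# numbers are placeholders, and the labels correspond to the ui.write calls
# in the statistics section:
#
#   $ hg debugrevlog -m
#   format : 1
#   flags : inline, generaldelta
#   revisions : 1200
#   ...
#   avg chain length : 3
#   max chain length : 18
#   compression ratio : 7
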
@command('debugrevlogindex', cmdutil.debugrevlogopts +
         [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
         _('[-f FORMAT] -c|-m|FILE'),
         optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))

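# Illustrative (non-verbose, format 0) output for the changelog index; the
# revision numbers, hashes and exact column spacing are placeholders, and the
# columns follow the header built above:
#
#   $ hg debugrevlogindex -c
#      rev linkrev nodeid       p1           p2
#        0       0 1ea73414a91b 000000000000 000000000000
#        1       1 66f7d451a68b 1ea73414a91b 000000000000
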
@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('', 'show-revs', True, _('print list of result revisions (default)')),
          ('s', 'show-set', None, _('print internal representation of result set')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
         ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

2561 @command('debugserve', [
2564 @command('debugserve', [
2562 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2565 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2563 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2566 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2564 ('', 'logiofile', '', _('file to log server I/O to')),
2567 ('', 'logiofile', '', _('file to log server I/O to')),
2565 ], '')
2568 ], '')
2566 def debugserve(ui, repo, **opts):
2569 def debugserve(ui, repo, **opts):
2567 """run a server with advanced settings
2570 """run a server with advanced settings
2568
2571
2569 This command is similar to :hg:`serve`. It exists partially as a
2572 This command is similar to :hg:`serve`. It exists partially as a
2570 workaround for the fact that ``hg serve --stdio`` must have specific
2573 workaround for the fact that ``hg serve --stdio`` must have specific
2571 arguments for security reasons.
2574 arguments for security reasons.
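
For example, to serve the current repository over stdio while logging the
exchanged data (an illustrative invocation only; the log path is a
placeholder)::

  hg debugserve --sshstdio --logiofile /tmp/server-io.log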
2572 """
2575 """
2573 opts = pycompat.byteskwargs(opts)
2576 opts = pycompat.byteskwargs(opts)
2574
2577
2575 if not opts['sshstdio']:
2578 if not opts['sshstdio']:
2576 raise error.Abort(_('only --sshstdio is currently supported'))
2579 raise error.Abort(_('only --sshstdio is currently supported'))
2577
2580
2578 logfh = None
2581 logfh = None
2579
2582
2580 if opts['logiofd'] and opts['logiofile']:
2583 if opts['logiofd'] and opts['logiofile']:
2581 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2584 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2582
2585
2583 if opts['logiofd']:
2586 if opts['logiofd']:
2584 # Line buffered because output is line based.
2587 # Line buffered because output is line based.
2585 try:
2588 try:
2586 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2589 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2587 except OSError as e:
2590 except OSError as e:
2588 if e.errno != errno.ESPIPE:
2591 if e.errno != errno.ESPIPE:
2589 raise
2592 raise
2590 # can't seek a pipe, so `ab` mode fails on py3
2593 # can't seek a pipe, so `ab` mode fails on py3
2591 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2594 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2592 elif opts['logiofile']:
2595 elif opts['logiofile']:
2593 logfh = open(opts['logiofile'], 'ab', 1)
2596 logfh = open(opts['logiofile'], 'ab', 1)
2594
2597
2595 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2598 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2596 s.serve_forever()
2599 s.serve_forever()
2597
2600
2598 @command('debugsetparents', [], _('REV1 [REV2]'))
2601 @command('debugsetparents', [], _('REV1 [REV2]'))
2599 def debugsetparents(ui, repo, rev1, rev2=None):
2602 def debugsetparents(ui, repo, rev1, rev2=None):
2600 """manually set the parents of the current working directory
2603 """manually set the parents of the current working directory
2601
2604
2602 This is useful for writing repository conversion tools, but should
2605 This is useful for writing repository conversion tools, but should
2603 be used with care. For example, neither the working directory nor the
2606 be used with care. For example, neither the working directory nor the
2604 dirstate is updated, so file status may be incorrect after running this
2607 dirstate is updated, so file status may be incorrect after running this
2605 command.
2608 command.
2606
2609
2607 Returns 0 on success.
2610 Returns 0 on success.
2608 """
2611 """
2609
2612
2610 node1 = scmutil.revsingle(repo, rev1).node()
2613 node1 = scmutil.revsingle(repo, rev1).node()
2611 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2614 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2612
2615
2613 with repo.wlock():
2616 with repo.wlock():
2614 repo.setparents(node1, node2)
2617 repo.setparents(node1, node2)
2615
2618
2616 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2619 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2617 def debugssl(ui, repo, source=None, **opts):
2620 def debugssl(ui, repo, source=None, **opts):
2618 '''test a secure connection to a server
2621 '''test a secure connection to a server
2619
2622
2620 This builds the certificate chain for the server on Windows, installing the
2623 This builds the certificate chain for the server on Windows, installing the
2621 missing intermediates and trusted root via Windows Update if necessary. It
2624 missing intermediates and trusted root via Windows Update if necessary. It
2622 does nothing on other platforms.
2625 does nothing on other platforms.
2623
2626
2624 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2627 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2625 that server is used. See :hg:`help urls` for more information.
2628 that server is used. See :hg:`help urls` for more information.
2626
2629
2627 If the update succeeds, retry the original operation. Otherwise, the cause
2630 If the update succeeds, retry the original operation. Otherwise, the cause
2628 of the SSL error is likely another issue.
2631 of the SSL error is likely another issue.
2629 '''
2632 '''
2630 if not pycompat.iswindows:
2633 if not pycompat.iswindows:
2631 raise error.Abort(_('certificate chain building is only possible on '
2634 raise error.Abort(_('certificate chain building is only possible on '
2632 'Windows'))
2635 'Windows'))
2633
2636
2634 if not source:
2637 if not source:
2635 if not repo:
2638 if not repo:
2636 raise error.Abort(_("there is no Mercurial repository here, and no "
2639 raise error.Abort(_("there is no Mercurial repository here, and no "
2637 "server specified"))
2640 "server specified"))
2638 source = "default"
2641 source = "default"
2639
2642
2640 source, branches = hg.parseurl(ui.expandpath(source))
2643 source, branches = hg.parseurl(ui.expandpath(source))
2641 url = util.url(source)
2644 url = util.url(source)
2642
2645
2643 defaultport = {'https': 443, 'ssh': 22}
2646 defaultport = {'https': 443, 'ssh': 22}
2644 if url.scheme in defaultport:
2647 if url.scheme in defaultport:
2645 try:
2648 try:
2646 addr = (url.host, int(url.port or defaultport[url.scheme]))
2649 addr = (url.host, int(url.port or defaultport[url.scheme]))
2647 except ValueError:
2650 except ValueError:
2648 raise error.Abort(_("malformed port number in URL"))
2651 raise error.Abort(_("malformed port number in URL"))
2649 else:
2652 else:
2650 raise error.Abort(_("only https and ssh connections are supported"))
2653 raise error.Abort(_("only https and ssh connections are supported"))
2651
2654
2652 from . import win32
2655 from . import win32
2653
2656
2654 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2657 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2655 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2658 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2656
2659
2657 try:
2660 try:
2658 s.connect(addr)
2661 s.connect(addr)
2659 cert = s.getpeercert(True)
2662 cert = s.getpeercert(True)
2660
2663
2661 ui.status(_('checking the certificate chain for %s\n') % url.host)
2664 ui.status(_('checking the certificate chain for %s\n') % url.host)
2662
2665
2663 complete = win32.checkcertificatechain(cert, build=False)
2666 complete = win32.checkcertificatechain(cert, build=False)
2664
2667
2665 if not complete:
2668 if not complete:
2666 ui.status(_('certificate chain is incomplete, updating... '))
2669 ui.status(_('certificate chain is incomplete, updating... '))
2667
2670
2668 if not win32.checkcertificatechain(cert):
2671 if not win32.checkcertificatechain(cert):
2669 ui.status(_('failed.\n'))
2672 ui.status(_('failed.\n'))
2670 else:
2673 else:
2671 ui.status(_('done.\n'))
2674 ui.status(_('done.\n'))
2672 else:
2675 else:
2673 ui.status(_('full certificate chain is available\n'))
2676 ui.status(_('full certificate chain is available\n'))
2674 finally:
2677 finally:
2675 s.close()
2678 s.close()
2676
2679
2677 @command('debugsub',
2680 @command('debugsub',
2678 [('r', 'rev', '',
2681 [('r', 'rev', '',
2679 _('revision to check'), _('REV'))],
2682 _('revision to check'), _('REV'))],
2680 _('[-r REV] [REV]'))
2683 _('[-r REV] [REV]'))
2681 def debugsub(ui, repo, rev=None):
2684 def debugsub(ui, repo, rev=None):
2682 ctx = scmutil.revsingle(repo, rev, None)
2685 ctx = scmutil.revsingle(repo, rev, None)
2683 for k, v in sorted(ctx.substate.items()):
2686 for k, v in sorted(ctx.substate.items()):
2684 ui.write(('path %s\n') % k)
2687 ui.write(('path %s\n') % k)
2685 ui.write((' source %s\n') % v[0])
2688 ui.write((' source %s\n') % v[0])
2686 ui.write((' revision %s\n') % v[1])
2689 ui.write((' revision %s\n') % v[1])
2687
2690
2688 @command('debugsuccessorssets',
2691 @command('debugsuccessorssets',
2689 [('', 'closest', False, _('return closest successors sets only'))],
2692 [('', 'closest', False, _('return closest successors sets only'))],
2690 _('[REV]'))
2693 _('[REV]'))
2691 def debugsuccessorssets(ui, repo, *revs, **opts):
2694 def debugsuccessorssets(ui, repo, *revs, **opts):
2692 """show set of successors for revision
2695 """show set of successors for revision
2693
2696
2694 A successors set of changeset A is a consistent group of revisions that
2697 A successors set of changeset A is a consistent group of revisions that
2695 succeed A. It contains only non-obsolete changesets unless the closest
2698 succeed A. It contains only non-obsolete changesets unless the closest
2696 successors sets are requested (``--closest``).
2699 successors sets are requested (``--closest``).
2697
2700
2698 In most cases a changeset A has a single successors set containing a single
2701 In most cases a changeset A has a single successors set containing a single
2699 successor (changeset A replaced by A').
2702 successor (changeset A replaced by A').
2700
2703
2701 A changeset that is made obsolete with no successors is called "pruned".
2704 A changeset that is made obsolete with no successors is called "pruned".
2702 Such changesets have no successors sets at all.
2705 Such changesets have no successors sets at all.
2703
2706
2704 A changeset that has been "split" will have a successors set containing
2707 A changeset that has been "split" will have a successors set containing
2705 more than one successor.
2708 more than one successor.
2706
2709
2707 A changeset that has been rewritten in multiple different ways is called
2710 A changeset that has been rewritten in multiple different ways is called
2708 "divergent". Such changesets have multiple successor sets (each of which
2711 "divergent". Such changesets have multiple successor sets (each of which
2709 may also be split, i.e. have multiple successors).
2712 may also be split, i.e. have multiple successors).
2710
2713
2711 Results are displayed as follows::
2714 Results are displayed as follows::
2712
2715
2713 <rev1>
2716 <rev1>
2714 <successors-1A>
2717 <successors-1A>
2715 <rev2>
2718 <rev2>
2716 <successors-2A>
2719 <successors-2A>
2717 <successors-2B1> <successors-2B2> <successors-2B3>
2720 <successors-2B1> <successors-2B2> <successors-2B3>
2718
2721
2719 Here rev2 has two possible (i.e. divergent) successors sets. The first
2722 Here rev2 has two possible (i.e. divergent) successors sets. The first
2720 holds one element, whereas the second holds three (i.e. the changeset has
2723 holds one element, whereas the second holds three (i.e. the changeset has
2721 been split).
2724 been split).
2722 """
2725 """
2723 # passed to successorssets caching computation from one call to another
2726 # passed to successorssets caching computation from one call to another
2724 cache = {}
2727 cache = {}
2725 ctx2str = bytes
2728 ctx2str = bytes
2726 node2str = short
2729 node2str = short
2727 for rev in scmutil.revrange(repo, revs):
2730 for rev in scmutil.revrange(repo, revs):
2728 ctx = repo[rev]
2731 ctx = repo[rev]
2729 ui.write('%s\n'% ctx2str(ctx))
2732 ui.write('%s\n'% ctx2str(ctx))
2730 for succsset in obsutil.successorssets(repo, ctx.node(),
2733 for succsset in obsutil.successorssets(repo, ctx.node(),
2731 closest=opts[r'closest'],
2734 closest=opts[r'closest'],
2732 cache=cache):
2735 cache=cache):
2733 if succsset:
2736 if succsset:
2734 ui.write(' ')
2737 ui.write(' ')
2735 ui.write(node2str(succsset[0]))
2738 ui.write(node2str(succsset[0]))
2736 for node in succsset[1:]:
2739 for node in succsset[1:]:
2737 ui.write(' ')
2740 ui.write(' ')
2738 ui.write(node2str(node))
2741 ui.write(node2str(node))
2739 ui.write('\n')
2742 ui.write('\n')
2740
2743
2741 @command('debugtemplate',
2744 @command('debugtemplate',
2742 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2745 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2743 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2746 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2744 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2747 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2745 optionalrepo=True)
2748 optionalrepo=True)
2746 def debugtemplate(ui, repo, tmpl, **opts):
2749 def debugtemplate(ui, repo, tmpl, **opts):
2747 """parse and apply a template
2750 """parse and apply a template
2748
2751
2749 If -r/--rev is given, the template is processed as a log template and
2752 If -r/--rev is given, the template is processed as a log template and
2750 applied to the given changesets. Otherwise, it is processed as a generic
2753 applied to the given changesets. Otherwise, it is processed as a generic
2751 template.
2754 template.
2752
2755
2753 Use --verbose to print the parsed tree.
2756 Use --verbose to print the parsed tree.
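
For example (illustrative invocations only; the keyword name ``greeting``
is arbitrary)::

  hg debugtemplate -D greeting=hello '{greeting} world\n'
  hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'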
2754 """
2757 """
2755 revs = None
2758 revs = None
2756 if opts[r'rev']:
2759 if opts[r'rev']:
2757 if repo is None:
2760 if repo is None:
2758 raise error.RepoError(_('there is no Mercurial repository here '
2761 raise error.RepoError(_('there is no Mercurial repository here '
2759 '(.hg not found)'))
2762 '(.hg not found)'))
2760 revs = scmutil.revrange(repo, opts[r'rev'])
2763 revs = scmutil.revrange(repo, opts[r'rev'])
2761
2764
2762 props = {}
2765 props = {}
2763 for d in opts[r'define']:
2766 for d in opts[r'define']:
2764 try:
2767 try:
2765 k, v = (e.strip() for e in d.split('=', 1))
2768 k, v = (e.strip() for e in d.split('=', 1))
2766 if not k or k == 'ui':
2769 if not k or k == 'ui':
2767 raise ValueError
2770 raise ValueError
2768 props[k] = v
2771 props[k] = v
2769 except ValueError:
2772 except ValueError:
2770 raise error.Abort(_('malformed keyword definition: %s') % d)
2773 raise error.Abort(_('malformed keyword definition: %s') % d)
2771
2774
2772 if ui.verbose:
2775 if ui.verbose:
2773 aliases = ui.configitems('templatealias')
2776 aliases = ui.configitems('templatealias')
2774 tree = templater.parse(tmpl)
2777 tree = templater.parse(tmpl)
2775 ui.note(templater.prettyformat(tree), '\n')
2778 ui.note(templater.prettyformat(tree), '\n')
2776 newtree = templater.expandaliases(tree, aliases)
2779 newtree = templater.expandaliases(tree, aliases)
2777 if newtree != tree:
2780 if newtree != tree:
2778 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2781 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2779
2782
2780 if revs is None:
2783 if revs is None:
2781 tres = formatter.templateresources(ui, repo)
2784 tres = formatter.templateresources(ui, repo)
2782 t = formatter.maketemplater(ui, tmpl, resources=tres)
2785 t = formatter.maketemplater(ui, tmpl, resources=tres)
2783 if ui.verbose:
2786 if ui.verbose:
2784 kwds, funcs = t.symbolsuseddefault()
2787 kwds, funcs = t.symbolsuseddefault()
2785 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2788 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2786 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2789 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2787 ui.write(t.renderdefault(props))
2790 ui.write(t.renderdefault(props))
2788 else:
2791 else:
2789 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2792 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2790 if ui.verbose:
2793 if ui.verbose:
2791 kwds, funcs = displayer.t.symbolsuseddefault()
2794 kwds, funcs = displayer.t.symbolsuseddefault()
2792 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2795 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2793 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2796 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2794 for r in revs:
2797 for r in revs:
2795 displayer.show(repo[r], **pycompat.strkwargs(props))
2798 displayer.show(repo[r], **pycompat.strkwargs(props))
2796 displayer.close()
2799 displayer.close()
2797
2800
2798 @command('debuguigetpass', [
2801 @command('debuguigetpass', [
2799 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2802 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2800 ], _('[-p TEXT]'), norepo=True)
2803 ], _('[-p TEXT]'), norepo=True)
2801 def debuguigetpass(ui, prompt=''):
2804 def debuguigetpass(ui, prompt=''):
2802 """show prompt to type password"""
2805 """show prompt to type password"""
2803 r = ui.getpass(prompt)
2806 r = ui.getpass(prompt)
2804 ui.write(('response: %s\n') % r)
2807 ui.write(('response: %s\n') % r)
2805
2808
2806 @command('debuguiprompt', [
2809 @command('debuguiprompt', [
2807 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2810 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2808 ], _('[-p TEXT]'), norepo=True)
2811 ], _('[-p TEXT]'), norepo=True)
2809 def debuguiprompt(ui, prompt=''):
2812 def debuguiprompt(ui, prompt=''):
2810 """show plain prompt"""
2813 """show plain prompt"""
2811 r = ui.prompt(prompt)
2814 r = ui.prompt(prompt)
2812 ui.write(('response: %s\n') % r)
2815 ui.write(('response: %s\n') % r)
2813
2816
2814 @command('debugupdatecaches', [])
2817 @command('debugupdatecaches', [])
2815 def debugupdatecaches(ui, repo, *pats, **opts):
2818 def debugupdatecaches(ui, repo, *pats, **opts):
2816 """warm all known caches in the repository"""
2819 """warm all known caches in the repository"""
2817 with repo.wlock(), repo.lock():
2820 with repo.wlock(), repo.lock():
2818 repo.updatecaches(full=True)
2821 repo.updatecaches(full=True)
2819
2822
2820 @command('debugupgraderepo', [
2823 @command('debugupgraderepo', [
2821 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2824 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2822 ('', 'run', False, _('performs an upgrade')),
2825 ('', 'run', False, _('performs an upgrade')),
2823 ('', 'backup', True, _('keep the old repository content around')),
2826 ('', 'backup', True, _('keep the old repository content around')),
2824 ])
2827 ])
2825 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2828 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2826 """upgrade a repository to use different features
2829 """upgrade a repository to use different features
2827
2830
2828 If no arguments are specified, the repository is evaluated for upgrade
2831 If no arguments are specified, the repository is evaluated for upgrade
2829 and a list of problems and potential optimizations is printed.
2832 and a list of problems and potential optimizations is printed.
2830
2833
2831 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2834 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2832 can be influenced via additional arguments. More details will be provided
2835 can be influenced via additional arguments. More details will be provided
2833 by the command output when run without ``--run``.
2836 by the command output when run without ``--run``.
2834
2837
2835 During the upgrade, the repository will be locked and no writes will be
2838 During the upgrade, the repository will be locked and no writes will be
2836 allowed.
2839 allowed.
2837
2840
2838 At the end of the upgrade, the repository may not be readable while new
2841 At the end of the upgrade, the repository may not be readable while new
2839 repository data is swapped in. This window will be as long as it takes to
2842 repository data is swapped in. This window will be as long as it takes to
2840 rename some directories inside the ``.hg`` directory. On most machines, this
2843 rename some directories inside the ``.hg`` directory. On most machines, this
2841 should complete almost instantaneously and the chances of a consumer being
2844 should complete almost instantaneously and the chances of a consumer being
2842 unable to access the repository should be low.
2845 unable to access the repository should be low.
2843 """
2846 """
2844 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2847 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2845 backup=backup)
2848 backup=backup)
2846
2849
2847 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2850 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2848 inferrepo=True)
2851 inferrepo=True)
2849 def debugwalk(ui, repo, *pats, **opts):
2852 def debugwalk(ui, repo, *pats, **opts):
2850 """show how files match on given patterns"""
2853 """show how files match on given patterns"""
2851 opts = pycompat.byteskwargs(opts)
2854 opts = pycompat.byteskwargs(opts)
2852 m = scmutil.match(repo[None], pats, opts)
2855 m = scmutil.match(repo[None], pats, opts)
2853 if ui.verbose:
2856 if ui.verbose:
2854 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2857 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2855 items = list(repo[None].walk(m))
2858 items = list(repo[None].walk(m))
2856 if not items:
2859 if not items:
2857 return
2860 return
2858 f = lambda fn: fn
2861 f = lambda fn: fn
2859 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2862 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2860 f = lambda fn: util.normpath(fn)
2863 f = lambda fn: util.normpath(fn)
2861 fmt = 'f %%-%ds %%-%ds %%s' % (
2864 fmt = 'f %%-%ds %%-%ds %%s' % (
2862 max([len(abs) for abs in items]),
2865 max([len(abs) for abs in items]),
2863 max([len(repo.pathto(abs)) for abs in items]))
2866 max([len(repo.pathto(abs)) for abs in items]))
2864 for abs in items:
2867 for abs in items:
2865 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2868 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2866 ui.write("%s\n" % line.rstrip())
2869 ui.write("%s\n" % line.rstrip())
2867
2870
2868 @command('debugwhyunstable', [], _('REV'))
2871 @command('debugwhyunstable', [], _('REV'))
2869 def debugwhyunstable(ui, repo, rev):
2872 def debugwhyunstable(ui, repo, rev):
2870 """explain instabilities of a changeset"""
2873 """explain instabilities of a changeset"""
2871 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2874 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2872 dnodes = ''
2875 dnodes = ''
2873 if entry.get('divergentnodes'):
2876 if entry.get('divergentnodes'):
2874 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2877 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2875 for ctx in entry['divergentnodes']) + ' '
2878 for ctx in entry['divergentnodes']) + ' '
2876 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2879 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2877 entry['reason'], entry['node']))
2880 entry['reason'], entry['node']))
2878
2881
2879 @command('debugwireargs',
2882 @command('debugwireargs',
2880 [('', 'three', '', 'three'),
2883 [('', 'three', '', 'three'),
2881 ('', 'four', '', 'four'),
2884 ('', 'four', '', 'four'),
2882 ('', 'five', '', 'five'),
2885 ('', 'five', '', 'five'),
2883 ] + cmdutil.remoteopts,
2886 ] + cmdutil.remoteopts,
2884 _('REPO [OPTIONS]... [ONE [TWO]]'),
2887 _('REPO [OPTIONS]... [ONE [TWO]]'),
2885 norepo=True)
2888 norepo=True)
2886 def debugwireargs(ui, repopath, *vals, **opts):
2889 def debugwireargs(ui, repopath, *vals, **opts):
2887 opts = pycompat.byteskwargs(opts)
2890 opts = pycompat.byteskwargs(opts)
2888 repo = hg.peer(ui, opts, repopath)
2891 repo = hg.peer(ui, opts, repopath)
2889 for opt in cmdutil.remoteopts:
2892 for opt in cmdutil.remoteopts:
2890 del opts[opt[1]]
2893 del opts[opt[1]]
2891 args = {}
2894 args = {}
2892 for k, v in opts.iteritems():
2895 for k, v in opts.iteritems():
2893 if v:
2896 if v:
2894 args[k] = v
2897 args[k] = v
2895 args = pycompat.strkwargs(args)
2898 args = pycompat.strkwargs(args)
2896 # run twice to check that we don't mess up the stream for the next command
2899 # run twice to check that we don't mess up the stream for the next command
2897 res1 = repo.debugwireargs(*vals, **args)
2900 res1 = repo.debugwireargs(*vals, **args)
2898 res2 = repo.debugwireargs(*vals, **args)
2901 res2 = repo.debugwireargs(*vals, **args)
2899 ui.write("%s\n" % res1)
2902 ui.write("%s\n" % res1)
2900 if res1 != res2:
2903 if res1 != res2:
2901 ui.warn("%s\n" % res2)
2904 ui.warn("%s\n" % res2)
2902
2905
2903 def _parsewirelangblocks(fh):
2906 def _parsewirelangblocks(fh):
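    # Parse the debugwireproto block mini-language from a file object,
    # yielding (action, blocklines) tuples: a block is a non-indented
    # action line followed by zero or more indented lines. See the
    # debugwireproto docstring below for details.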
2904 activeaction = None
2907 activeaction = None
2905 blocklines = []
2908 blocklines = []
2906 lastindent = 0
2909 lastindent = 0
2907
2910
2908 for line in fh:
2911 for line in fh:
2909 line = line.rstrip()
2912 line = line.rstrip()
2910 if not line:
2913 if not line:
2911 continue
2914 continue
2912
2915
2913 if line.startswith(b'#'):
2916 if line.startswith(b'#'):
2914 continue
2917 continue
2915
2918
2916 if not line.startswith(b' '):
2919 if not line.startswith(b' '):
2917 # New block. Flush previous one.
2920 # New block. Flush previous one.
2918 if activeaction:
2921 if activeaction:
2919 yield activeaction, blocklines
2922 yield activeaction, blocklines
2920
2923
2921 activeaction = line
2924 activeaction = line
2922 blocklines = []
2925 blocklines = []
2923 lastindent = 0
2926 lastindent = 0
2924 continue
2927 continue
2925
2928
2926 # Else we start with an indent.
2929 # Else we start with an indent.
2927
2930
2928 if not activeaction:
2931 if not activeaction:
2929 raise error.Abort(_('indented line outside of block'))
2932 raise error.Abort(_('indented line outside of block'))
2930
2933
2931 indent = len(line) - len(line.lstrip())
2934 indent = len(line) - len(line.lstrip())
2932
2935
2933 # If this line is indented more than the last line, concatenate it.
2936 # If this line is indented more than the last line, concatenate it.
2934 if indent > lastindent and blocklines:
2937 if indent > lastindent and blocklines:
2935 blocklines[-1] += line.lstrip()
2938 blocklines[-1] += line.lstrip()
2936 else:
2939 else:
2937 blocklines.append(line)
2940 blocklines.append(line)
2938 lastindent = indent
2941 lastindent = indent
2939
2942
2940 # Flush last block.
2943 # Flush last block.
2941 if activeaction:
2944 if activeaction:
2942 yield activeaction, blocklines
2945 yield activeaction, blocklines
2943
2946
2944 @command('debugwireproto',
2947 @command('debugwireproto',
2945 [
2948 [
2946 ('', 'localssh', False, _('start an SSH server for this repo')),
2949 ('', 'localssh', False, _('start an SSH server for this repo')),
2947 ('', 'peer', '', _('construct a specific version of the peer')),
2950 ('', 'peer', '', _('construct a specific version of the peer')),
2948 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2951 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2949 ('', 'nologhandshake', False,
2952 ('', 'nologhandshake', False,
2950 _('do not log I/O related to the peer handshake')),
2953 _('do not log I/O related to the peer handshake')),
2951 ] + cmdutil.remoteopts,
2954 ] + cmdutil.remoteopts,
2952 _('[PATH]'),
2955 _('[PATH]'),
2953 optionalrepo=True)
2956 optionalrepo=True)
2954 def debugwireproto(ui, repo, path=None, **opts):
2957 def debugwireproto(ui, repo, path=None, **opts):
2955 """send wire protocol commands to a server
2958 """send wire protocol commands to a server
2956
2959
2957 This command can be used to issue wire protocol commands to remote
2960 This command can be used to issue wire protocol commands to remote
2958 peers and to debug the raw data being exchanged.
2961 peers and to debug the raw data being exchanged.
2959
2962
2960 ``--localssh`` will start an SSH server against the current repository
2963 ``--localssh`` will start an SSH server against the current repository
2961 and connect to that. By default, the connection will perform a handshake
2964 and connect to that. By default, the connection will perform a handshake
2962 and establish an appropriate peer instance.
2965 and establish an appropriate peer instance.
2963
2966
2964 ``--peer`` can be used to bypass the handshake protocol and construct a
2967 ``--peer`` can be used to bypass the handshake protocol and construct a
2965 peer instance using the specified class type. Valid values are ``raw``,
2968 peer instance using the specified class type. Valid values are ``raw``,
2966 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2969 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2967 raw data payloads and don't support higher-level command actions.
2970 raw data payloads and don't support higher-level command actions.
2968
2971
2969 ``--noreadstderr`` can be used to disable automatic reading from stderr
2972 ``--noreadstderr`` can be used to disable automatic reading from stderr
2970 of the peer (for SSH connections only). Disabling automatic reading of
2973 of the peer (for SSH connections only). Disabling automatic reading of
2971 stderr is useful for making output more deterministic.
2974 stderr is useful for making output more deterministic.
2972
2975
2973 Commands are issued via a mini language which is specified via stdin.
2976 Commands are issued via a mini language which is specified via stdin.
2974 The language consists of individual actions to perform. An action is
2977 The language consists of individual actions to perform. An action is
2975 defined by a block. A block is defined as a line with no leading
2978 defined by a block. A block is defined as a line with no leading
2976 space followed by 0 or more lines with leading space. Blocks are
2979 space followed by 0 or more lines with leading space. Blocks are
2977 effectively a high-level command with additional metadata.
2980 effectively a high-level command with additional metadata.
2978
2981
2979 Lines beginning with ``#`` are ignored.
2982 Lines beginning with ``#`` are ignored.
2980
2983
2981 The following sections denote available actions.
2984 The following sections denote available actions.
2982
2985
2983 raw
2986 raw
2984 ---
2987 ---
2985
2988
2986 Send raw data to the server.
2989 Send raw data to the server.
2987
2990
2988 The block payload contains the raw data to send as one atomic send
2991 The block payload contains the raw data to send as one atomic send
2989 operation. The data may not actually be delivered in a single system
2992 operation. The data may not actually be delivered in a single system
2990 call: it depends on the abilities of the transport being used.
2993 call: it depends on the abilities of the transport being used.
2991
2994
2992 Each line in the block is de-indented and concatenated. Then, that
2995 Each line in the block is de-indented and concatenated. Then, that
2993 value is evaluated as a Python b'' literal. This allows the use of
2996 value is evaluated as a Python b'' literal. This allows the use of
2994 backslash escaping, etc.
2997 backslash escaping, etc.
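
For example, a raw block carrying the SSH protocol ``hello`` command might
look like this (an illustrative sketch only)::

  raw
      hello\n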
2995
2998
2996 raw+
2999 raw+
2997 ----
3000 ----
2998
3001
2999 Behaves like ``raw`` except flushes output afterwards.
3002 Behaves like ``raw`` except flushes output afterwards.
3000
3003
3001 command <X>
3004 command <X>
3002 -----------
3005 -----------
3003
3006
3004 Send a request to run a named command, whose name follows the ``command``
3007 Send a request to run a named command, whose name follows the ``command``
3005 string.
3008 string.
3006
3009
3007 Arguments to the command are defined as lines in this block. The format of
3010 Arguments to the command are defined as lines in this block. The format of
3008 each line is ``<key> <value>``. e.g.::
3011 each line is ``<key> <value>``. e.g.::
3009
3012
3010 command listkeys
3013 command listkeys
3011 namespace bookmarks
3014 namespace bookmarks
3012
3015
3013 If the value begins with ``eval:``, it will be interpreted as a Python
3016 If the value begins with ``eval:``, it will be interpreted as a Python
3014 literal expression. Otherwise values are interpreted as Python b'' literals.
3017 literal expression. Otherwise values are interpreted as Python b'' literals.
3015 This allows sending complex types and encoding special byte sequences via
3018 This allows sending complex types and encoding special byte sequences via
3016 backslash escaping.
3019 backslash escaping.
3017
3020
3018 The following arguments have special meaning:
3021 The following arguments have special meaning:
3019
3022
3020 ``PUSHFILE``
3023 ``PUSHFILE``
3021 When defined, the *push* mechanism of the peer will be used instead
3024 When defined, the *push* mechanism of the peer will be used instead
3022 of the static request-response mechanism and the content of the
3025 of the static request-response mechanism and the content of the
3023 file specified in the value of this argument will be sent as the
3026 file specified in the value of this argument will be sent as the
3024 command payload.
3027 command payload.
3025
3028
3026 This can be used to submit a local bundle file to the remote.
3029 This can be used to submit a local bundle file to the remote.
3027
3030
3028 batchbegin
3031 batchbegin
3029 ----------
3032 ----------
3030
3033
3031 Instruct the peer to begin a batched send.
3034 Instruct the peer to begin a batched send.
3032
3035
3033 All ``command`` blocks are queued for execution until the next
3036 All ``command`` blocks are queued for execution until the next
3034 ``batchsubmit`` block.
3037 ``batchsubmit`` block.
3035
3038
3036 batchsubmit
3039 batchsubmit
3037 -----------
3040 -----------
3038
3041
3039 Submit previously queued ``command`` blocks as a batch request.
3042 Submit previously queued ``command`` blocks as a batch request.
3040
3043
3041 This action MUST be paired with a ``batchbegin`` action.
3044 This action MUST be paired with a ``batchbegin`` action.
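
For example, two commands can be batched together (an illustrative sketch
only; ``heads`` and ``listkeys`` are ordinary wire protocol commands)::

  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit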
3042
3045
3043 httprequest <method> <path>
3046 httprequest <method> <path>
3044 ---------------------------
3047 ---------------------------
3045
3048
3046 (HTTP peer only)
3049 (HTTP peer only)
3047
3050
3048 Send an HTTP request to the peer.
3051 Send an HTTP request to the peer.
3049
3052
3050 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3053 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3051
3054
3052 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3055 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3053 headers to add to the request. e.g. ``Accept: foo``.
3056 headers to add to the request. e.g. ``Accept: foo``.
3054
3057
3055 The following arguments are special:
3058 The following arguments are special:
3056
3059
3057 ``BODYFILE``
3060 ``BODYFILE``
3058 The content of the file defined as the value to this argument will be
3061 The content of the file defined as the value to this argument will be
3059 transferred verbatim as the HTTP request body.
3062 transferred verbatim as the HTTP request body.
3060
3063
3061 ``frame <type> <flags> <payload>``
3064 ``frame <type> <flags> <payload>``
3062 Send a unified protocol frame as part of the request body.
3065 Send a unified protocol frame as part of the request body.
3063
3066
3064 All frames will be collected and sent as the body to the HTTP
3067 All frames will be collected and sent as the body to the HTTP
3065 request.
3068 request.
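
A complete ``httprequest`` block might look like this (an illustrative
sketch only; the path and header values are placeholders)::

  httprequest GET api/
      user-agent: test
      accept: application/mercurial-cbor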
3066
3069
3067 close
3070 close
3068 -----
3071 -----
3069
3072
3070 Close the connection to the server.
3073 Close the connection to the server.
3071
3074
3072 flush
3075 flush
3073 -----
3076 -----
3074
3077
3075 Flush data written to the server.
3078 Flush data written to the server.
3076
3079
3077 readavailable
3080 readavailable
3078 -------------
3081 -------------
3079
3082
3080 Close the write end of the connection and read all available data from
3083 Close the write end of the connection and read all available data from
3081 the server.
3084 the server.
3082
3085
3083 If the connection to the server encompasses multiple pipes, we poll both
3086 If the connection to the server encompasses multiple pipes, we poll both
3084 pipes and read available data.
3087 pipes and read available data.
3085
3088
3086 readline
3089 readline
3087 --------
3090 --------
3088
3091
3089 Read a line of output from the server. If there are multiple output
3092 Read a line of output from the server. If there are multiple output
3090 pipes, reads only the main pipe.
3093 pipes, reads only the main pipe.
3091
3094
3092 ereadline
3095 ereadline
3093 ---------
3096 ---------
3094
3097
3095 Like ``readline``, but read from the stderr pipe, if available.
3098 Like ``readline``, but read from the stderr pipe, if available.
3096
3099
3097 read <X>
3100 read <X>
3098 --------
3101 --------
3099
3102
3100 ``read()`` N bytes from the server's main output pipe.
3103 ``read()`` N bytes from the server's main output pipe.
3101
3104
3102 eread <X>
3105 eread <X>
3103 ---------
3106 ---------
3104
3107
3105 ``read()`` N bytes from the server's stderr pipe, if available.
3108 ``read()`` N bytes from the server's stderr pipe, if available.
3106
3109
3107 Specifying Unified Frame-Based Protocol Frames
3110 Specifying Unified Frame-Based Protocol Frames
3108 ----------------------------------------------
3111 ----------------------------------------------
3109
3112
3110 It is possible to emit *Unified Frame-Based Protocol* frames by using
3113 It is possible to emit *Unified Frame-Based Protocol* frames by using
3111 special syntax.
3114 special syntax.
3112
3115
3113 A frame is composed of a type, flags, and a payload. These can be parsed
3116 A frame is composed of a type, flags, and a payload. These can be parsed
3114 from a string of the form:
3117 from a string of the form:
3115
3118
3116 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3119 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3117
3120
3118 ``request-id`` and ``stream-id`` are integers defining the request and
3121 ``request-id`` and ``stream-id`` are integers defining the request and
3119 stream identifiers.
3122 stream identifiers.
3120
3123
3121 ``type`` can be an integer value for the frame type or the string name
3124 ``type`` can be an integer value for the frame type or the string name
3122 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3125 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3123 ``command-name``.
3126 ``command-name``.
3124
3127
3125 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3128 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3126 components. Each component (and there can be just one) can be an integer
3129 components. Each component (and there can be just one) can be an integer
3127 or a flag name for stream flags or frame flags, respectively. Values are
3130 or a flag name for stream flags or frame flags, respectively. Values are
3128 resolved to integers and then bitwise OR'd together.
3131 resolved to integers and then bitwise OR'd together.
3129
3132
3130 ``payload`` represents the raw frame payload. If it begins with
3133 ``payload`` represents the raw frame payload. If it begins with
3131 ``cbor:``, the following string is evaluated as Python code and the
3134 ``cbor:``, the following string is evaluated as Python code and the
3132 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3135 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3133 as a Python byte string literal.
3136 as a Python byte string literal.
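
For example (an illustrative frame only; the type and flag names are
resolved against the definitions in ``wireprotoframing.py``)::

  frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}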
3134 """
3137 """
3135 opts = pycompat.byteskwargs(opts)
3138 opts = pycompat.byteskwargs(opts)
3136
3139
3137 if opts['localssh'] and not repo:
3140 if opts['localssh'] and not repo:
3138 raise error.Abort(_('--localssh requires a repository'))
3141 raise error.Abort(_('--localssh requires a repository'))
3139
3142
3140 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3143 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3141 raise error.Abort(_('invalid value for --peer'),
3144 raise error.Abort(_('invalid value for --peer'),
3142 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3145 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3143
3146
3144 if path and opts['localssh']:
3147 if path and opts['localssh']:
3145 raise error.Abort(_('cannot specify --localssh with an explicit '
3148 raise error.Abort(_('cannot specify --localssh with an explicit '
3146 'path'))
3149 'path'))
3147
3150
3148 if ui.interactive():
3151 if ui.interactive():
3149 ui.write(_('(waiting for commands on stdin)\n'))
3152 ui.write(_('(waiting for commands on stdin)\n'))
3150
3153
3151 blocks = list(_parsewirelangblocks(ui.fin))
3154 blocks = list(_parsewirelangblocks(ui.fin))
3152
3155
3153 proc = None
3156 proc = None
3154 stdin = None
3157 stdin = None
3155 stdout = None
3158 stdout = None
3156 stderr = None
3159 stderr = None
3157 opener = None
3160 opener = None
3158
3161
3159 if opts['localssh']:
3162 if opts['localssh']:
3160 # We start the SSH server in its own process so there is process
3163 # We start the SSH server in its own process so there is process
3161 # separation. This prevents a whole class of potential bugs around
3164 # separation. This prevents a whole class of potential bugs around
3162 # shared state from interfering with server operation.
3165 # shared state from interfering with server operation.
3163 args = procutil.hgcmd() + [
3166 args = procutil.hgcmd() + [
3164 '-R', repo.root,
3167 '-R', repo.root,
3165 'debugserve', '--sshstdio',
3168 'debugserve', '--sshstdio',
3166 ]
3169 ]
3167 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3170 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3168 stdin=subprocess.PIPE,
3171 stdin=subprocess.PIPE,
3169 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3172 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3170 bufsize=0)
3173 bufsize=0)
3171
3174
3172 stdin = proc.stdin
3175 stdin = proc.stdin
3173 stdout = proc.stdout
3176 stdout = proc.stdout
3174 stderr = proc.stderr
3177 stderr = proc.stderr
3175
3178
3176 # We turn the pipes into observers so we can log I/O.
3179 # We turn the pipes into observers so we can log I/O.
3177 if ui.verbose or opts['peer'] == 'raw':
3180 if ui.verbose or opts['peer'] == 'raw':
3178 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3181 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3179 logdata=True)
3182 logdata=True)
3180 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3183 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3181 logdata=True)
3184 logdata=True)
3182 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3185 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3183 logdata=True)
3186 logdata=True)
3184
3187
3185 # --localssh also implies the peer connection settings.
3188 # --localssh also implies the peer connection settings.
3186
3189
3187 url = 'ssh://localserver'
3190 url = 'ssh://localserver'
3188 autoreadstderr = not opts['noreadstderr']
3191 autoreadstderr = not opts['noreadstderr']
3189
3192
3190 if opts['peer'] == 'ssh1':
3193 if opts['peer'] == 'ssh1':
3191 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3194 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3192 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3195 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3193 None, autoreadstderr=autoreadstderr)
3196 None, autoreadstderr=autoreadstderr)
3194 elif opts['peer'] == 'ssh2':
3197 elif opts['peer'] == 'ssh2':
3195 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3198 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3196 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3199 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3197 None, autoreadstderr=autoreadstderr)
3200 None, autoreadstderr=autoreadstderr)
3198 elif opts['peer'] == 'raw':
3201 elif opts['peer'] == 'raw':
3199 ui.write(_('using raw connection to peer\n'))
3202 ui.write(_('using raw connection to peer\n'))
3200 peer = None
3203 peer = None
3201 else:
3204 else:
3202 ui.write(_('creating ssh peer from handshake results\n'))
3205 ui.write(_('creating ssh peer from handshake results\n'))
3203 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3206 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3204 autoreadstderr=autoreadstderr)
3207 autoreadstderr=autoreadstderr)
3205
3208
3206 elif path:
3209 elif path:
3207 # We bypass hg.peer() so we can proxy the sockets.
3210 # We bypass hg.peer() so we can proxy the sockets.
3208 # TODO consider not doing this because we skip
3211 # TODO consider not doing this because we skip
3209 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3212 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3210 u = util.url(path)
3213 u = util.url(path)
3211 if u.scheme != 'http':
3214 if u.scheme != 'http':
3212 raise error.Abort(_('only http:// paths are currently supported'))
3215 raise error.Abort(_('only http:// paths are currently supported'))
3213
3216
3214 url, authinfo = u.authinfo()
3217 url, authinfo = u.authinfo()
3215 openerargs = {
3218 openerargs = {
3216 r'useragent': b'Mercurial debugwireproto',
3219 r'useragent': b'Mercurial debugwireproto',
3217 }
3220 }
3218
3221
3219 # Turn pipes/sockets into observers so we can log I/O.
3222 # Turn pipes/sockets into observers so we can log I/O.
3220 if ui.verbose:
3223 if ui.verbose:
3221 openerargs.update({
3224 openerargs.update({
3222 r'loggingfh': ui,
3225 r'loggingfh': ui,
3223 r'loggingname': b's',
3226 r'loggingname': b's',
3224 r'loggingopts': {
3227 r'loggingopts': {
3225 r'logdata': True,
3228 r'logdata': True,
3226 r'logdataapis': False,
3229 r'logdataapis': False,
3227 },
3230 },
3228 })
3231 })
3229
3232
3230 if ui.debugflag:
3233 if ui.debugflag:
3231 openerargs[r'loggingopts'][r'logdataapis'] = True
3234 openerargs[r'loggingopts'][r'logdataapis'] = True
3232
3235
3233 # Don't send default headers when in raw mode. This allows us to
3236 # Don't send default headers when in raw mode. This allows us to
3234 # bypass most of the behavior of our URL handling code so we can
3237 # bypass most of the behavior of our URL handling code so we can
3235 # have near complete control over what's sent on the wire.
3238 # have near complete control over what's sent on the wire.
3236 if opts['peer'] == 'raw':
3239 if opts['peer'] == 'raw':
3237 openerargs[r'sendaccept'] = False
3240 openerargs[r'sendaccept'] = False
3238
3241
3239 opener = urlmod.opener(ui, authinfo, **openerargs)
3242 opener = urlmod.opener(ui, authinfo, **openerargs)
3240
3243
3241 if opts['peer'] == 'http2':
3244 if opts['peer'] == 'http2':
3242 ui.write(_('creating http peer for wire protocol version 2\n'))
3245 ui.write(_('creating http peer for wire protocol version 2\n'))
3243 # We go through makepeer() because we need an API descriptor for
3246 # We go through makepeer() because we need an API descriptor for
3244 # the peer instance to be useful.
3247 # the peer instance to be useful.
3245 with ui.configoverride({
3248 with ui.configoverride({
3246 ('experimental', 'httppeer.advertise-v2'): True}):
3249 ('experimental', 'httppeer.advertise-v2'): True}):
3247 if opts['nologhandshake']:
3250 if opts['nologhandshake']:
3248 ui.pushbuffer()
3251 ui.pushbuffer()
3249
3252
3250 peer = httppeer.makepeer(ui, path, opener=opener)
3253 peer = httppeer.makepeer(ui, path, opener=opener)
3251
3254
3252 if opts['nologhandshake']:
3255 if opts['nologhandshake']:
3253 ui.popbuffer()
3256 ui.popbuffer()
3254
3257
3255 if not isinstance(peer, httppeer.httpv2peer):
3258 if not isinstance(peer, httppeer.httpv2peer):
3256 raise error.Abort(_('could not instantiate HTTP peer for '
3259 raise error.Abort(_('could not instantiate HTTP peer for '
3257 'wire protocol version 2'),
3260 'wire protocol version 2'),
3258 hint=_('the server may not have the feature '
3261 hint=_('the server may not have the feature '
3259 'enabled or is not allowing this '
3262 'enabled or is not allowing this '
3260 'client version'))
3263 'client version'))
3261
3264
3262 elif opts['peer'] == 'raw':
3265 elif opts['peer'] == 'raw':
3263 ui.write(_('using raw connection to peer\n'))
3266 ui.write(_('using raw connection to peer\n'))
3264 peer = None
3267 peer = None
3265 elif opts['peer']:
3268 elif opts['peer']:
3266 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3269 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3267 opts['peer'])
3270 opts['peer'])
3268 else:
3271 else:
3269 peer = httppeer.makepeer(ui, path, opener=opener)
3272 peer = httppeer.makepeer(ui, path, opener=opener)
3270
3273
3271 # We /could/ populate stdin/stdout with sock.makefile()...
3274 # We /could/ populate stdin/stdout with sock.makefile()...
3272 else:
3275 else:
3273 raise error.Abort(_('unsupported connection configuration'))
3276 raise error.Abort(_('unsupported connection configuration'))
3274
3277
3275 batchedcommands = None
3278 batchedcommands = None
3276
3279
3277 # Now perform actions based on the parsed wire language instructions.
3280 # Now perform actions based on the parsed wire language instructions.
3278 for action, lines in blocks:
3281 for action, lines in blocks:
3279 if action in ('raw', 'raw+'):
3282 if action in ('raw', 'raw+'):
3280 if not stdin:
3283 if not stdin:
3281 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3284 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3282
3285
3283 # Concatenate the data together.
3286 # Concatenate the data together.
3284 data = ''.join(l.lstrip() for l in lines)
3287 data = ''.join(l.lstrip() for l in lines)
3285 data = stringutil.unescapestr(data)
3288 data = stringutil.unescapestr(data)
3286 stdin.write(data)
3289 stdin.write(data)
3287
3290
3288 if action == 'raw+':
3291 if action == 'raw+':
3289 stdin.flush()
3292 stdin.flush()
3290 elif action == 'flush':
3293 elif action == 'flush':
3291 if not stdin:
3294 if not stdin:
3292 raise error.Abort(_('cannot call flush on this peer'))
3295 raise error.Abort(_('cannot call flush on this peer'))
3293 stdin.flush()
3296 stdin.flush()
3294 elif action.startswith('command'):
3297 elif action.startswith('command'):
3295 if not peer:
3298 if not peer:
3296 raise error.Abort(_('cannot send commands unless peer instance '
3299 raise error.Abort(_('cannot send commands unless peer instance '
3297 'is available'))
3300 'is available'))
3298
3301
3299 command = action.split(' ', 1)[1]
3302 command = action.split(' ', 1)[1]
3300
3303
3301 args = {}
3304 args = {}
3302 for line in lines:
3305 for line in lines:
3303 # We need to allow empty values.
3306 # We need to allow empty values.
3304 fields = line.lstrip().split(' ', 1)
3307 fields = line.lstrip().split(' ', 1)
3305 if len(fields) == 1:
3308 if len(fields) == 1:
3306 key = fields[0]
3309 key = fields[0]
3307 value = ''
3310 value = ''
3308 else:
3311 else:
3309 key, value = fields
3312 key, value = fields
3310
3313
3311 if value.startswith('eval:'):
3314 if value.startswith('eval:'):
3312 value = stringutil.evalpythonliteral(value[5:])
3315 value = stringutil.evalpythonliteral(value[5:])
3313 else:
3316 else:
3314 value = stringutil.unescapestr(value)
3317 value = stringutil.unescapestr(value)
3315
3318
3316 args[key] = value
3319 args[key] = value
3317
3320
3318 if batchedcommands is not None:
3321 if batchedcommands is not None:
3319 batchedcommands.append((command, args))
3322 batchedcommands.append((command, args))
3320 continue
3323 continue
3321
3324
3322 ui.status(_('sending %s command\n') % command)
3325 ui.status(_('sending %s command\n') % command)
3323
3326
3324 if 'PUSHFILE' in args:
3327 if 'PUSHFILE' in args:
3325 with open(args['PUSHFILE'], r'rb') as fh:
3328 with open(args['PUSHFILE'], r'rb') as fh:
3326 del args['PUSHFILE']
3329 del args['PUSHFILE']
3327 res, output = peer._callpush(command, fh,
3330 res, output = peer._callpush(command, fh,
3328 **pycompat.strkwargs(args))
3331 **pycompat.strkwargs(args))
3329 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3332 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3330 ui.status(_('remote output: %s\n') %
3333 ui.status(_('remote output: %s\n') %
3331 stringutil.escapestr(output))
3334 stringutil.escapestr(output))
3332 else:
3335 else:
3333 with peer.commandexecutor() as e:
3336 with peer.commandexecutor() as e:
3334 res = e.callcommand(command, args).result()
3337 res = e.callcommand(command, args).result()
3335
3338
3336 if isinstance(res, wireprotov2peer.commandresponse):
3339 if isinstance(res, wireprotov2peer.commandresponse):
3337 val = res.objects()
3340 val = res.objects()
3338 ui.status(_('response: %s\n') %
3341 ui.status(_('response: %s\n') %
3339 stringutil.pprint(val, bprefix=True, indent=2))
3342 stringutil.pprint(val, bprefix=True, indent=2))
3340 else:
3343 else:
3341 ui.status(_('response: %s\n') %
3344 ui.status(_('response: %s\n') %
3342 stringutil.pprint(res, bprefix=True, indent=2))
3345 stringutil.pprint(res, bprefix=True, indent=2))
3343
3346
3344 elif action == 'batchbegin':
3347 elif action == 'batchbegin':
3345 if batchedcommands is not None:
3348 if batchedcommands is not None:
3346 raise error.Abort(_('nested batchbegin not allowed'))
3349 raise error.Abort(_('nested batchbegin not allowed'))
3347
3350
3348 batchedcommands = []
3351 batchedcommands = []
3349 elif action == 'batchsubmit':
3352 elif action == 'batchsubmit':
3350 # There is a batching API we could go through. But it would be
3353 # There is a batching API we could go through. But it would be
3351 # difficult to normalize requests into function calls. It is easier
3354 # difficult to normalize requests into function calls. It is easier
3352 # to bypass this layer and normalize to commands + args.
3355 # to bypass this layer and normalize to commands + args.
3353 ui.status(_('sending batch with %d sub-commands\n') %
3356 ui.status(_('sending batch with %d sub-commands\n') %
3354 len(batchedcommands))
3357 len(batchedcommands))
3355 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3358 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3356 ui.status(_('response #%d: %s\n') %
3359 ui.status(_('response #%d: %s\n') %
3357 (i, stringutil.escapestr(chunk)))
3360 (i, stringutil.escapestr(chunk)))
3358
3361
3359 batchedcommands = None
3362 batchedcommands = None
3360
3363
3361 elif action.startswith('httprequest '):
3364 elif action.startswith('httprequest '):
3362 if not opener:
3365 if not opener:
3363 raise error.Abort(_('cannot use httprequest without an HTTP '
3366 raise error.Abort(_('cannot use httprequest without an HTTP '
3364 'peer'))
3367 'peer'))
3365
3368
3366 request = action.split(' ', 2)
3369 request = action.split(' ', 2)
3367 if len(request) != 3:
3370 if len(request) != 3:
3368 raise error.Abort(_('invalid httprequest: expected format is '
3371 raise error.Abort(_('invalid httprequest: expected format is '
3369 '"httprequest <method> <path>'))
3372 '"httprequest <method> <path>'))
3370
3373
3371 method, httppath = request[1:]
3374 method, httppath = request[1:]
3372 headers = {}
3375 headers = {}
3373 body = None
3376 body = None
3374 frames = []
3377 frames = []
3375 for line in lines:
3378 for line in lines:
3376 line = line.lstrip()
3379 line = line.lstrip()
3377 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3380 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3378 if m:
3381 if m:
3379 # Headers need to use native strings.
3382 # Headers need to use native strings.
3380 key = pycompat.strurl(m.group(1))
3383 key = pycompat.strurl(m.group(1))
3381 value = pycompat.strurl(m.group(2))
3384 value = pycompat.strurl(m.group(2))
3382 headers[key] = value
3385 headers[key] = value
3383 continue
3386 continue
3384
3387
3385 if line.startswith(b'BODYFILE '):
3388 if line.startswith(b'BODYFILE '):
3386 with open(line.split(b' ', 1)[1], 'rb') as fh:
3389 with open(line.split(b' ', 1)[1], 'rb') as fh:
3387 body = fh.read()
3390 body = fh.read()
3388 elif line.startswith(b'frame '):
3391 elif line.startswith(b'frame '):
3389 frame = wireprotoframing.makeframefromhumanstring(
3392 frame = wireprotoframing.makeframefromhumanstring(
3390 line[len(b'frame '):])
3393 line[len(b'frame '):])
3391
3394
3392 frames.append(frame)
3395 frames.append(frame)
3393 else:
3396 else:
3394 raise error.Abort(_('unknown argument to httprequest: %s') %
3397 raise error.Abort(_('unknown argument to httprequest: %s') %
3395 line)
3398 line)
3396
3399
3397 url = path + httppath
3400 url = path + httppath
3398
3401
3399 if frames:
3402 if frames:
3400 body = b''.join(bytes(f) for f in frames)
3403 body = b''.join(bytes(f) for f in frames)
3401
3404
3402 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3405 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3403
3406
3404 # urllib.Request insists on using has_data() as a proxy for
3407 # urllib.Request insists on using has_data() as a proxy for
3405 # determining the request method. Override that to use our
3408 # determining the request method. Override that to use our
3406 # explicitly requested method.
3409 # explicitly requested method.
3407 req.get_method = lambda: pycompat.sysstr(method)
3410 req.get_method = lambda: pycompat.sysstr(method)
3408
3411
3409 try:
3412 try:
3410 res = opener.open(req)
3413 res = opener.open(req)
3411 body = res.read()
3414 body = res.read()
3412 except util.urlerr.urlerror as e:
3415 except util.urlerr.urlerror as e:
3413 # read() method must be called, but only exists in Python 2
3416 # read() method must be called, but only exists in Python 2
3414 getattr(e, 'read', lambda: None)()
3417 getattr(e, 'read', lambda: None)()
3415 continue
3418 continue
3416
3419
3417 ct = res.headers.get(r'Content-Type')
3420 ct = res.headers.get(r'Content-Type')
3418 if ct == r'application/mercurial-cbor':
3421 if ct == r'application/mercurial-cbor':
3419 ui.write(_('cbor> %s\n') %
3422 ui.write(_('cbor> %s\n') %
3420 stringutil.pprint(cborutil.decodeall(body),
3423 stringutil.pprint(cborutil.decodeall(body),
3421 bprefix=True,
3424 bprefix=True,
3422 indent=2))
3425 indent=2))
3423
3426
3424 elif action == 'close':
3427 elif action == 'close':
3425 peer.close()
3428 peer.close()
3426 elif action == 'readavailable':
3429 elif action == 'readavailable':
3427 if not stdout or not stderr:
3430 if not stdout or not stderr:
3428 raise error.Abort(_('readavailable not available on this peer'))
3431 raise error.Abort(_('readavailable not available on this peer'))
3429
3432
3430 stdin.close()
3433 stdin.close()
3431 stdout.read()
3434 stdout.read()
3432 stderr.read()
3435 stderr.read()
3433
3436
3434 elif action == 'readline':
3437 elif action == 'readline':
3435 if not stdout:
3438 if not stdout:
3436 raise error.Abort(_('readline not available on this peer'))
3439 raise error.Abort(_('readline not available on this peer'))
3437 stdout.readline()
3440 stdout.readline()
3438 elif action == 'ereadline':
3441 elif action == 'ereadline':
3439 if not stderr:
3442 if not stderr:
3440 raise error.Abort(_('ereadline not available on this peer'))
3443 raise error.Abort(_('ereadline not available on this peer'))
3441 stderr.readline()
3444 stderr.readline()
3442 elif action.startswith('read '):
3445 elif action.startswith('read '):
3443 count = int(action.split(' ', 1)[1])
3446 count = int(action.split(' ', 1)[1])
3444 if not stdout:
3447 if not stdout:
3445 raise error.Abort(_('read not available on this peer'))
3448 raise error.Abort(_('read not available on this peer'))
3446 stdout.read(count)
3449 stdout.read(count)
3447 elif action.startswith('eread '):
3450 elif action.startswith('eread '):
3448 count = int(action.split(' ', 1)[1])
3451 count = int(action.split(' ', 1)[1])
3449 if not stderr:
3452 if not stderr:
3450 raise error.Abort(_('eread not available on this peer'))
3453 raise error.Abort(_('eread not available on this peer'))
3451 stderr.read(count)
3454 stderr.read(count)
3452 else:
3455 else:
3453 raise error.Abort(_('unknown action: %s') % action)
3456 raise error.Abort(_('unknown action: %s') % action)
3454
3457
3455 if batchedcommands is not None:
3458 if batchedcommands is not None:
3456 raise error.Abort(_('unclosed "batchbegin" request'))
3459 raise error.Abort(_('unclosed "batchbegin" request'))
3457
3460
3458 if peer:
3461 if peer:
3459 peer.close()
3462 peer.close()
3460
3463
3461 if proc:
3464 if proc:
3462 proc.kill()
3465 proc.kill()
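
The updated test expectations that follow add an "elapsed time: * seconds (glob)" line to every hg debugdiscovery run, covering both the tree-based (--old) and the set-based discovery paths. A minimal sketch of how such a line can be produced around a discovery call is given below; the helper name timed_discovery, the run_discovery callable, and the choice of Python's time.monotonic() are illustrative assumptions rather than the code of this changeset.

    import time

    def timed_discovery(ui, run_discovery):
        # Run the discovery callable and report its wall-clock duration in the
        # "elapsed time: <float> seconds" shape the tests glob over below.
        start = time.monotonic()
        result = run_discovery()
        elapsed = time.monotonic() - start
        # ui is assumed to be a Mercurial-style ui whose write() takes bytes.
        ui.write(b'elapsed time: %f seconds\n' % elapsed)
        return result
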
@@ -1,1045 +1,1083 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
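Before the individual scenarios, a rough Python view of what the set-based runs exercise may help when reading the "% -- a -> b set" blocks. The sketch assumes a setdiscovery.findcommonheads(ui, local, remote) entry point that returns (common nodes, anything-incoming flag, remote heads); that signature and return shape are assumptions made for illustration, not something this test asserts.

    from mercurial import hg, setdiscovery, ui as uimod

    ui = uimod.ui.load()
    local = hg.repository(ui, b'a')   # the "a" clone created by testdesc
    remote = hg.peer(ui, {}, b'b')    # the "b" clone, treated as the remote

    # Assumed return shape: (common nodes, anything-incoming flag, remote heads).
    common, anyincoming, remoteheads = setdiscovery.findcommonheads(
        ui, local, remote)
    ui.write(b'number of common heads: %d\n' % len(common))
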
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 heads summary:
47 heads summary:
47 total common heads: 2
48 total common heads: 2
48 also local heads: 2
49 also local heads: 2
49 also remote heads: 1
50 also remote heads: 1
50 local heads: 2
51 local heads: 2
51 common: 2
52 common: 2
52 missing: 0
53 missing: 0
53 remote heads: 3
54 remote heads: 3
54 common: 1
55 common: 1
55 unknown: 2
56 unknown: 2
56 local changesets: 7
57 local changesets: 7
57 common: 7
58 common: 7
58 missing: 0
59 missing: 0
59 common heads: 01241442b3c2 b5714e113bc0
60 common heads: 01241442b3c2 b5714e113bc0
60
61
61 % -- a -> b set
62 % -- a -> b set
62 comparing with b
63 comparing with b
63 query 1; heads
64 query 1; heads
64 searching for changes
65 searching for changes
65 all local heads known remotely
66 all local heads known remotely
67 elapsed time: * seconds (glob)
66 heads summary:
68 heads summary:
67 total common heads: 2
69 total common heads: 2
68 also local heads: 2
70 also local heads: 2
69 also remote heads: 1
71 also remote heads: 1
70 local heads: 2
72 local heads: 2
71 common: 2
73 common: 2
72 missing: 0
74 missing: 0
73 remote heads: 3
75 remote heads: 3
74 common: 1
76 common: 1
75 unknown: 2
77 unknown: 2
76 local changesets: 7
78 local changesets: 7
77 common: 7
79 common: 7
78 missing: 0
80 missing: 0
79 common heads: 01241442b3c2 b5714e113bc0
81 common heads: 01241442b3c2 b5714e113bc0
80
82
81 % -- a -> b set (tip only)
83 % -- a -> b set (tip only)
82 comparing with b
84 comparing with b
83 query 1; heads
85 query 1; heads
84 searching for changes
86 searching for changes
85 all local heads known remotely
87 all local heads known remotely
88 elapsed time: * seconds (glob)
86 heads summary:
89 heads summary:
87 total common heads: 1
90 total common heads: 1
88 also local heads: 1
91 also local heads: 1
89 also remote heads: 0
92 also remote heads: 0
90 local heads: 2
93 local heads: 2
91 common: 1
94 common: 1
92 missing: 1
95 missing: 1
93 remote heads: 3
96 remote heads: 3
94 common: 0
97 common: 0
95 unknown: 3
98 unknown: 3
96 local changesets: 7
99 local changesets: 7
97 common: 6
100 common: 6
98 missing: 1
101 missing: 1
99 common heads: b5714e113bc0
102 common heads: b5714e113bc0
100
103
101 % -- b -> a tree
104 % -- b -> a tree
102 comparing with a
105 comparing with a
103 searching for changes
106 searching for changes
104 unpruned common: 01241442b3c2 b5714e113bc0
107 unpruned common: 01241442b3c2 b5714e113bc0
108 elapsed time: * seconds (glob)
105 heads summary:
109 heads summary:
106 total common heads: 2
110 total common heads: 2
107 also local heads: 1
111 also local heads: 1
108 also remote heads: 2
112 also remote heads: 2
109 local heads: 3
113 local heads: 3
110 common: 1
114 common: 1
111 missing: 2
115 missing: 2
112 remote heads: 2
116 remote heads: 2
113 common: 2
117 common: 2
114 unknown: 0
118 unknown: 0
115 local changesets: 15
119 local changesets: 15
116 common: 7
120 common: 7
117 missing: 8
121 missing: 8
118 common heads: 01241442b3c2 b5714e113bc0
122 common heads: 01241442b3c2 b5714e113bc0
119
123
120 % -- b -> a set
124 % -- b -> a set
121 comparing with a
125 comparing with a
122 query 1; heads
126 query 1; heads
123 searching for changes
127 searching for changes
124 all remote heads known locally
128 all remote heads known locally
129 elapsed time: * seconds (glob)
125 heads summary:
130 heads summary:
126 total common heads: 2
131 total common heads: 2
127 also local heads: 1
132 also local heads: 1
128 also remote heads: 2
133 also remote heads: 2
129 local heads: 3
134 local heads: 3
130 common: 1
135 common: 1
131 missing: 2
136 missing: 2
132 remote heads: 2
137 remote heads: 2
133 common: 2
138 common: 2
134 unknown: 0
139 unknown: 0
135 local changesets: 15
140 local changesets: 15
136 common: 7
141 common: 7
137 missing: 8
142 missing: 8
138 common heads: 01241442b3c2 b5714e113bc0
143 common heads: 01241442b3c2 b5714e113bc0
139
144
140 % -- b -> a set (tip only)
145 % -- b -> a set (tip only)
141 comparing with a
146 comparing with a
142 query 1; heads
147 query 1; heads
143 searching for changes
148 searching for changes
144 all remote heads known locally
149 all remote heads known locally
150 elapsed time: * seconds (glob)
145 heads summary:
151 heads summary:
146 total common heads: 2
152 total common heads: 2
147 also local heads: 1
153 also local heads: 1
148 also remote heads: 2
154 also remote heads: 2
149 local heads: 3
155 local heads: 3
150 common: 1
156 common: 1
151 missing: 2
157 missing: 2
152 remote heads: 2
158 remote heads: 2
153 common: 2
159 common: 2
154 unknown: 0
160 unknown: 0
155 local changesets: 15
161 local changesets: 15
156 common: 7
162 common: 7
157 missing: 8
163 missing: 8
158 common heads: 01241442b3c2 b5714e113bc0
164 common heads: 01241442b3c2 b5714e113bc0
159
165
160
166
161 Many new:
167 Many new:
162
168
163 $ testdesc '-ra1 -ra2' '-rb' '
169 $ testdesc '-ra1 -ra2' '-rb' '
164 > +2:f +3:a1 +3:b
170 > +2:f +3:a1 +3:b
165 > <f +30 :a2'
171 > <f +30 :a2'
166
172
167 % -- a -> b tree
173 % -- a -> b tree
168 comparing with b
174 comparing with b
169 searching for changes
175 searching for changes
170 unpruned common: bebd167eb94d
176 unpruned common: bebd167eb94d
177 elapsed time: * seconds (glob)
171 heads summary:
178 heads summary:
172 total common heads: 1
179 total common heads: 1
173 also local heads: 1
180 also local heads: 1
174 also remote heads: 0
181 also remote heads: 0
175 local heads: 2
182 local heads: 2
176 common: 1
183 common: 1
177 missing: 1
184 missing: 1
178 remote heads: 1
185 remote heads: 1
179 common: 0
186 common: 0
180 unknown: 1
187 unknown: 1
181 local changesets: 35
188 local changesets: 35
182 common: 5
189 common: 5
183 missing: 30
190 missing: 30
184 common heads: bebd167eb94d
191 common heads: bebd167eb94d
185
192
186 % -- a -> b set
193 % -- a -> b set
187 comparing with b
194 comparing with b
188 query 1; heads
195 query 1; heads
189 searching for changes
196 searching for changes
190 taking initial sample
197 taking initial sample
191 searching: 2 queries
198 searching: 2 queries
192 query 2; still undecided: 29, sample size is: 29
199 query 2; still undecided: 29, sample size is: 29
193 2 total queries in *.????s (glob)
200 2 total queries in *.????s (glob)
201 elapsed time: * seconds (glob)
194 heads summary:
202 heads summary:
195 total common heads: 1
203 total common heads: 1
196 also local heads: 1
204 also local heads: 1
197 also remote heads: 0
205 also remote heads: 0
198 local heads: 2
206 local heads: 2
199 common: 1
207 common: 1
200 missing: 1
208 missing: 1
201 remote heads: 1
209 remote heads: 1
202 common: 0
210 common: 0
203 unknown: 1
211 unknown: 1
204 local changesets: 35
212 local changesets: 35
205 common: 5
213 common: 5
206 missing: 30
214 missing: 30
207 common heads: bebd167eb94d
215 common heads: bebd167eb94d
208
216
209 % -- a -> b set (tip only)
217 % -- a -> b set (tip only)
210 comparing with b
218 comparing with b
211 query 1; heads
219 query 1; heads
212 searching for changes
220 searching for changes
213 taking quick initial sample
221 taking quick initial sample
214 searching: 2 queries
222 searching: 2 queries
215 query 2; still undecided: 31, sample size is: 31
223 query 2; still undecided: 31, sample size is: 31
216 2 total queries in *.????s (glob)
224 2 total queries in *.????s (glob)
225 elapsed time: * seconds (glob)
217 heads summary:
226 heads summary:
218 total common heads: 1
227 total common heads: 1
219 also local heads: 0
228 also local heads: 0
220 also remote heads: 0
229 also remote heads: 0
221 local heads: 2
230 local heads: 2
222 common: 0
231 common: 0
223 missing: 2
232 missing: 2
224 remote heads: 1
233 remote heads: 1
225 common: 0
234 common: 0
226 unknown: 1
235 unknown: 1
227 local changesets: 35
236 local changesets: 35
228 common: 2
237 common: 2
229 missing: 33
238 missing: 33
230 common heads: 66f7d451a68b
239 common heads: 66f7d451a68b
231
240
232 % -- b -> a tree
241 % -- b -> a tree
233 comparing with a
242 comparing with a
234 searching for changes
243 searching for changes
235 unpruned common: 66f7d451a68b bebd167eb94d
244 unpruned common: 66f7d451a68b bebd167eb94d
245 elapsed time: * seconds (glob)
236 heads summary:
246 heads summary:
237 total common heads: 1
247 total common heads: 1
238 also local heads: 0
248 also local heads: 0
239 also remote heads: 1
249 also remote heads: 1
240 local heads: 1
250 local heads: 1
241 common: 0
251 common: 0
242 missing: 1
252 missing: 1
243 remote heads: 2
253 remote heads: 2
244 common: 1
254 common: 1
245 unknown: 1
255 unknown: 1
246 local changesets: 8
256 local changesets: 8
247 common: 5
257 common: 5
248 missing: 3
258 missing: 3
249 common heads: bebd167eb94d
259 common heads: bebd167eb94d
250
260
251 % -- b -> a set
261 % -- b -> a set
252 comparing with a
262 comparing with a
253 query 1; heads
263 query 1; heads
254 searching for changes
264 searching for changes
255 taking initial sample
265 taking initial sample
256 searching: 2 queries
266 searching: 2 queries
257 query 2; still undecided: 2, sample size is: 2
267 query 2; still undecided: 2, sample size is: 2
258 2 total queries in *.????s (glob)
268 2 total queries in *.????s (glob)
269 elapsed time: * seconds (glob)
259 heads summary:
270 heads summary:
260 total common heads: 1
271 total common heads: 1
261 also local heads: 0
272 also local heads: 0
262 also remote heads: 1
273 also remote heads: 1
263 local heads: 1
274 local heads: 1
264 common: 0
275 common: 0
265 missing: 1
276 missing: 1
266 remote heads: 2
277 remote heads: 2
267 common: 1
278 common: 1
268 unknown: 1
279 unknown: 1
269 local changesets: 8
280 local changesets: 8
270 common: 5
281 common: 5
271 missing: 3
282 missing: 3
272 common heads: bebd167eb94d
283 common heads: bebd167eb94d
273
284
274 % -- b -> a set (tip only)
285 % -- b -> a set (tip only)
275 comparing with a
286 comparing with a
276 query 1; heads
287 query 1; heads
277 searching for changes
288 searching for changes
278 taking initial sample
289 taking initial sample
279 searching: 2 queries
290 searching: 2 queries
280 query 2; still undecided: 2, sample size is: 2
291 query 2; still undecided: 2, sample size is: 2
281 2 total queries in *.????s (glob)
292 2 total queries in *.????s (glob)
293 elapsed time: * seconds (glob)
282 heads summary:
294 heads summary:
283 total common heads: 1
295 total common heads: 1
284 also local heads: 0
296 also local heads: 0
285 also remote heads: 1
297 also remote heads: 1
286 local heads: 1
298 local heads: 1
287 common: 0
299 common: 0
288 missing: 1
300 missing: 1
289 remote heads: 2
301 remote heads: 2
290 common: 1
302 common: 1
291 unknown: 1
303 unknown: 1
292 local changesets: 8
304 local changesets: 8
293 common: 5
305 common: 5
294 missing: 3
306 missing: 3
295 common heads: bebd167eb94d
307 common heads: bebd167eb94d
296
308
297 Both sides many new with stub:
309 Both sides many new with stub:
298
310
299 $ testdesc '-ra1 -ra2' '-rb' '
311 $ testdesc '-ra1 -ra2' '-rb' '
300 > +2:f +2:a1 +30 :b
312 > +2:f +2:a1 +30 :b
301 > <f +30 :a2'
313 > <f +30 :a2'
302
314
303 % -- a -> b tree
315 % -- a -> b tree
304 comparing with b
316 comparing with b
305 searching for changes
317 searching for changes
306 unpruned common: 2dc09a01254d
318 unpruned common: 2dc09a01254d
319 elapsed time: * seconds (glob)
307 heads summary:
320 heads summary:
308 total common heads: 1
321 total common heads: 1
309 also local heads: 1
322 also local heads: 1
310 also remote heads: 0
323 also remote heads: 0
311 local heads: 2
324 local heads: 2
312 common: 1
325 common: 1
313 missing: 1
326 missing: 1
314 remote heads: 1
327 remote heads: 1
315 common: 0
328 common: 0
316 unknown: 1
329 unknown: 1
317 local changesets: 34
330 local changesets: 34
318 common: 4
331 common: 4
319 missing: 30
332 missing: 30
320 common heads: 2dc09a01254d
333 common heads: 2dc09a01254d
321
334
322 % -- a -> b set
335 % -- a -> b set
323 comparing with b
336 comparing with b
324 query 1; heads
337 query 1; heads
325 searching for changes
338 searching for changes
326 taking initial sample
339 taking initial sample
327 searching: 2 queries
340 searching: 2 queries
328 query 2; still undecided: 29, sample size is: 29
341 query 2; still undecided: 29, sample size is: 29
329 2 total queries in *.????s (glob)
342 2 total queries in *.????s (glob)
343 elapsed time: * seconds (glob)
330 heads summary:
344 heads summary:
331 total common heads: 1
345 total common heads: 1
332 also local heads: 1
346 also local heads: 1
333 also remote heads: 0
347 also remote heads: 0
334 local heads: 2
348 local heads: 2
335 common: 1
349 common: 1
336 missing: 1
350 missing: 1
337 remote heads: 1
351 remote heads: 1
338 common: 0
352 common: 0
339 unknown: 1
353 unknown: 1
340 local changesets: 34
354 local changesets: 34
341 common: 4
355 common: 4
342 missing: 30
356 missing: 30
343 common heads: 2dc09a01254d
357 common heads: 2dc09a01254d
344
358
345 % -- a -> b set (tip only)
359 % -- a -> b set (tip only)
346 comparing with b
360 comparing with b
347 query 1; heads
361 query 1; heads
348 searching for changes
362 searching for changes
349 taking quick initial sample
363 taking quick initial sample
350 searching: 2 queries
364 searching: 2 queries
351 query 2; still undecided: 31, sample size is: 31
365 query 2; still undecided: 31, sample size is: 31
352 2 total queries in *.????s (glob)
366 2 total queries in *.????s (glob)
367 elapsed time: * seconds (glob)
353 heads summary:
368 heads summary:
354 total common heads: 1
369 total common heads: 1
355 also local heads: 0
370 also local heads: 0
356 also remote heads: 0
371 also remote heads: 0
357 local heads: 2
372 local heads: 2
358 common: 0
373 common: 0
359 missing: 2
374 missing: 2
360 remote heads: 1
375 remote heads: 1
361 common: 0
376 common: 0
362 unknown: 1
377 unknown: 1
363 local changesets: 34
378 local changesets: 34
364 common: 2
379 common: 2
365 missing: 32
380 missing: 32
366 common heads: 66f7d451a68b
381 common heads: 66f7d451a68b
367
382
368 % -- b -> a tree
383 % -- b -> a tree
369 comparing with a
384 comparing with a
370 searching for changes
385 searching for changes
371 unpruned common: 2dc09a01254d 66f7d451a68b
386 unpruned common: 2dc09a01254d 66f7d451a68b
387 elapsed time: * seconds (glob)
372 heads summary:
388 heads summary:
373 total common heads: 1
389 total common heads: 1
374 also local heads: 0
390 also local heads: 0
375 also remote heads: 1
391 also remote heads: 1
376 local heads: 1
392 local heads: 1
377 common: 0
393 common: 0
378 missing: 1
394 missing: 1
379 remote heads: 2
395 remote heads: 2
380 common: 1
396 common: 1
381 unknown: 1
397 unknown: 1
382 local changesets: 34
398 local changesets: 34
383 common: 4
399 common: 4
384 missing: 30
400 missing: 30
385 common heads: 2dc09a01254d
401 common heads: 2dc09a01254d
386
402
387 % -- b -> a set
403 % -- b -> a set
388 comparing with a
404 comparing with a
389 query 1; heads
405 query 1; heads
390 searching for changes
406 searching for changes
391 taking initial sample
407 taking initial sample
392 searching: 2 queries
408 searching: 2 queries
393 query 2; still undecided: 29, sample size is: 29
409 query 2; still undecided: 29, sample size is: 29
394 2 total queries in *.????s (glob)
410 2 total queries in *.????s (glob)
411 elapsed time: * seconds (glob)
395 heads summary:
412 heads summary:
396 total common heads: 1
413 total common heads: 1
397 also local heads: 0
414 also local heads: 0
398 also remote heads: 1
415 also remote heads: 1
399 local heads: 1
416 local heads: 1
400 common: 0
417 common: 0
401 missing: 1
418 missing: 1
402 remote heads: 2
419 remote heads: 2
403 common: 1
420 common: 1
404 unknown: 1
421 unknown: 1
405 local changesets: 34
422 local changesets: 34
406 common: 4
423 common: 4
407 missing: 30
424 missing: 30
408 common heads: 2dc09a01254d
425 common heads: 2dc09a01254d
409
426
410 % -- b -> a set (tip only)
427 % -- b -> a set (tip only)
411 comparing with a
428 comparing with a
412 query 1; heads
429 query 1; heads
413 searching for changes
430 searching for changes
414 taking initial sample
431 taking initial sample
415 searching: 2 queries
432 searching: 2 queries
416 query 2; still undecided: 29, sample size is: 29
433 query 2; still undecided: 29, sample size is: 29
417 2 total queries in *.????s (glob)
434 2 total queries in *.????s (glob)
435 elapsed time: * seconds (glob)
418 heads summary:
436 heads summary:
419 total common heads: 1
437 total common heads: 1
420 also local heads: 0
438 also local heads: 0
421 also remote heads: 1
439 also remote heads: 1
422 local heads: 1
440 local heads: 1
423 common: 0
441 common: 0
424 missing: 1
442 missing: 1
425 remote heads: 2
443 remote heads: 2
426 common: 1
444 common: 1
427 unknown: 1
445 unknown: 1
428 local changesets: 34
446 local changesets: 34
429 common: 4
447 common: 4
430 missing: 30
448 missing: 30
431 common heads: 2dc09a01254d
449 common heads: 2dc09a01254d
432
450
433
451
434 Both many new:
452 Both many new:
435
453
436 $ testdesc '-ra' '-rb' '
454 $ testdesc '-ra' '-rb' '
437 > +2:f +30 :b
455 > +2:f +30 :b
438 > <f +30 :a'
456 > <f +30 :a'
439
457
440 % -- a -> b tree
458 % -- a -> b tree
441 comparing with b
459 comparing with b
442 searching for changes
460 searching for changes
443 unpruned common: 66f7d451a68b
461 unpruned common: 66f7d451a68b
462 elapsed time: * seconds (glob)
444 heads summary:
463 heads summary:
445 total common heads: 1
464 total common heads: 1
446 also local heads: 0
465 also local heads: 0
447 also remote heads: 0
466 also remote heads: 0
448 local heads: 1
467 local heads: 1
449 common: 0
468 common: 0
450 missing: 1
469 missing: 1
451 remote heads: 1
470 remote heads: 1
452 common: 0
471 common: 0
453 unknown: 1
472 unknown: 1
454 local changesets: 32
473 local changesets: 32
455 common: 2
474 common: 2
456 missing: 30
475 missing: 30
457 common heads: 66f7d451a68b
476 common heads: 66f7d451a68b
458
477
459 % -- a -> b set
478 % -- a -> b set
460 comparing with b
479 comparing with b
461 query 1; heads
480 query 1; heads
462 searching for changes
481 searching for changes
463 taking quick initial sample
482 taking quick initial sample
464 searching: 2 queries
483 searching: 2 queries
465 query 2; still undecided: 31, sample size is: 31
484 query 2; still undecided: 31, sample size is: 31
466 2 total queries in *.????s (glob)
485 2 total queries in *.????s (glob)
486 elapsed time: * seconds (glob)
467 heads summary:
487 heads summary:
468 total common heads: 1
488 total common heads: 1
469 also local heads: 0
489 also local heads: 0
470 also remote heads: 0
490 also remote heads: 0
471 local heads: 1
491 local heads: 1
472 common: 0
492 common: 0
473 missing: 1
493 missing: 1
474 remote heads: 1
494 remote heads: 1
475 common: 0
495 common: 0
476 unknown: 1
496 unknown: 1
477 local changesets: 32
497 local changesets: 32
478 common: 2
498 common: 2
479 missing: 30
499 missing: 30
480 common heads: 66f7d451a68b
500 common heads: 66f7d451a68b
481
501
482 % -- a -> b set (tip only)
502 % -- a -> b set (tip only)
483 comparing with b
503 comparing with b
484 query 1; heads
504 query 1; heads
485 searching for changes
505 searching for changes
486 taking quick initial sample
506 taking quick initial sample
487 searching: 2 queries
507 searching: 2 queries
488 query 2; still undecided: 31, sample size is: 31
508 query 2; still undecided: 31, sample size is: 31
489 2 total queries in *.????s (glob)
509 2 total queries in *.????s (glob)
510 elapsed time: * seconds (glob)
490 heads summary:
511 heads summary:
491 total common heads: 1
512 total common heads: 1
492 also local heads: 0
513 also local heads: 0
493 also remote heads: 0
514 also remote heads: 0
494 local heads: 1
515 local heads: 1
495 common: 0
516 common: 0
496 missing: 1
517 missing: 1
497 remote heads: 1
518 remote heads: 1
498 common: 0
519 common: 0
499 unknown: 1
520 unknown: 1
500 local changesets: 32
521 local changesets: 32
501 common: 2
522 common: 2
502 missing: 30
523 missing: 30
503 common heads: 66f7d451a68b
524 common heads: 66f7d451a68b
504
525
505 % -- b -> a tree
526 % -- b -> a tree
506 comparing with a
527 comparing with a
507 searching for changes
528 searching for changes
508 unpruned common: 66f7d451a68b
529 unpruned common: 66f7d451a68b
530 elapsed time: * seconds (glob)
509 heads summary:
531 heads summary:
510 total common heads: 1
532 total common heads: 1
511 also local heads: 0
533 also local heads: 0
512 also remote heads: 0
534 also remote heads: 0
513 local heads: 1
535 local heads: 1
514 common: 0
536 common: 0
515 missing: 1
537 missing: 1
516 remote heads: 1
538 remote heads: 1
517 common: 0
539 common: 0
518 unknown: 1
540 unknown: 1
519 local changesets: 32
541 local changesets: 32
520 common: 2
542 common: 2
521 missing: 30
543 missing: 30
522 common heads: 66f7d451a68b
544 common heads: 66f7d451a68b
523
545
524 % -- b -> a set
546 % -- b -> a set
525 comparing with a
547 comparing with a
526 query 1; heads
548 query 1; heads
527 searching for changes
549 searching for changes
528 taking quick initial sample
550 taking quick initial sample
529 searching: 2 queries
551 searching: 2 queries
530 query 2; still undecided: 31, sample size is: 31
552 query 2; still undecided: 31, sample size is: 31
531 2 total queries in *.????s (glob)
553 2 total queries in *.????s (glob)
554 elapsed time: * seconds (glob)
532 heads summary:
555 heads summary:
533 total common heads: 1
556 total common heads: 1
534 also local heads: 0
557 also local heads: 0
535 also remote heads: 0
558 also remote heads: 0
536 local heads: 1
559 local heads: 1
537 common: 0
560 common: 0
538 missing: 1
561 missing: 1
539 remote heads: 1
562 remote heads: 1
540 common: 0
563 common: 0
541 unknown: 1
564 unknown: 1
542 local changesets: 32
565 local changesets: 32
543 common: 2
566 common: 2
544 missing: 30
567 missing: 30
545 common heads: 66f7d451a68b
568 common heads: 66f7d451a68b
546
569
547 % -- b -> a set (tip only)
570 % -- b -> a set (tip only)
548 comparing with a
571 comparing with a
549 query 1; heads
572 query 1; heads
550 searching for changes
573 searching for changes
551 taking quick initial sample
574 taking quick initial sample
552 searching: 2 queries
575 searching: 2 queries
553 query 2; still undecided: 31, sample size is: 31
576 query 2; still undecided: 31, sample size is: 31
554 2 total queries in *.????s (glob)
577 2 total queries in *.????s (glob)
578 elapsed time: * seconds (glob)
555 heads summary:
579 heads summary:
556 total common heads: 1
580 total common heads: 1
557 also local heads: 0
581 also local heads: 0
558 also remote heads: 0
582 also remote heads: 0
559 local heads: 1
583 local heads: 1
560 common: 0
584 common: 0
561 missing: 1
585 missing: 1
562 remote heads: 1
586 remote heads: 1
563 common: 0
587 common: 0
564 unknown: 1
588 unknown: 1
565 local changesets: 32
589 local changesets: 32
566 common: 2
590 common: 2
567 missing: 30
591 missing: 30
568 common heads: 66f7d451a68b
592 common heads: 66f7d451a68b
569
593
570
594
571 Both many new skewed:
595 Both many new skewed:
572
596
573 $ testdesc '-ra' '-rb' '
597 $ testdesc '-ra' '-rb' '
574 > +2:f +30 :b
598 > +2:f +30 :b
575 > <f +50 :a'
599 > <f +50 :a'
576
600
577 % -- a -> b tree
601 % -- a -> b tree
578 comparing with b
602 comparing with b
579 searching for changes
603 searching for changes
580 unpruned common: 66f7d451a68b
604 unpruned common: 66f7d451a68b
605 elapsed time: * seconds (glob)
581 heads summary:
606 heads summary:
582 total common heads: 1
607 total common heads: 1
583 also local heads: 0
608 also local heads: 0
584 also remote heads: 0
609 also remote heads: 0
585 local heads: 1
610 local heads: 1
586 common: 0
611 common: 0
587 missing: 1
612 missing: 1
588 remote heads: 1
613 remote heads: 1
589 common: 0
614 common: 0
590 unknown: 1
615 unknown: 1
591 local changesets: 52
616 local changesets: 52
592 common: 2
617 common: 2
593 missing: 50
618 missing: 50
594 common heads: 66f7d451a68b
619 common heads: 66f7d451a68b
595
620
596 % -- a -> b set
621 % -- a -> b set
597 comparing with b
622 comparing with b
598 query 1; heads
623 query 1; heads
599 searching for changes
624 searching for changes
600 taking quick initial sample
625 taking quick initial sample
601 searching: 2 queries
626 searching: 2 queries
602 query 2; still undecided: 51, sample size is: 51
627 query 2; still undecided: 51, sample size is: 51
603 2 total queries in *.????s (glob)
628 2 total queries in *.????s (glob)
629 elapsed time: * seconds (glob)
604 heads summary:
630 heads summary:
605 total common heads: 1
631 total common heads: 1
606 also local heads: 0
632 also local heads: 0
607 also remote heads: 0
633 also remote heads: 0
608 local heads: 1
634 local heads: 1
609 common: 0
635 common: 0
610 missing: 1
636 missing: 1
611 remote heads: 1
637 remote heads: 1
612 common: 0
638 common: 0
613 unknown: 1
639 unknown: 1
614 local changesets: 52
640 local changesets: 52
615 common: 2
641 common: 2
616 missing: 50
642 missing: 50
617 common heads: 66f7d451a68b
643 common heads: 66f7d451a68b
618
644
619 % -- a -> b set (tip only)
645 % -- a -> b set (tip only)
620 comparing with b
646 comparing with b
621 query 1; heads
647 query 1; heads
622 searching for changes
648 searching for changes
623 taking quick initial sample
649 taking quick initial sample
624 searching: 2 queries
650 searching: 2 queries
625 query 2; still undecided: 51, sample size is: 51
651 query 2; still undecided: 51, sample size is: 51
626 2 total queries in *.????s (glob)
652 2 total queries in *.????s (glob)
653 elapsed time: * seconds (glob)
627 heads summary:
654 heads summary:
628 total common heads: 1
655 total common heads: 1
629 also local heads: 0
656 also local heads: 0
630 also remote heads: 0
657 also remote heads: 0
631 local heads: 1
658 local heads: 1
632 common: 0
659 common: 0
633 missing: 1
660 missing: 1
634 remote heads: 1
661 remote heads: 1
635 common: 0
662 common: 0
636 unknown: 1
663 unknown: 1
637 local changesets: 52
664 local changesets: 52
638 common: 2
665 common: 2
639 missing: 50
666 missing: 50
640 common heads: 66f7d451a68b
667 common heads: 66f7d451a68b
641
668
642 % -- b -> a tree
669 % -- b -> a tree
643 comparing with a
670 comparing with a
644 searching for changes
671 searching for changes
645 unpruned common: 66f7d451a68b
672 unpruned common: 66f7d451a68b
673 elapsed time: * seconds (glob)
646 heads summary:
674 heads summary:
647 total common heads: 1
675 total common heads: 1
648 also local heads: 0
676 also local heads: 0
649 also remote heads: 0
677 also remote heads: 0
650 local heads: 1
678 local heads: 1
651 common: 0
679 common: 0
652 missing: 1
680 missing: 1
653 remote heads: 1
681 remote heads: 1
654 common: 0
682 common: 0
655 unknown: 1
683 unknown: 1
656 local changesets: 32
684 local changesets: 32
657 common: 2
685 common: 2
658 missing: 30
686 missing: 30
659 common heads: 66f7d451a68b
687 common heads: 66f7d451a68b
660
688
661 % -- b -> a set
689 % -- b -> a set
662 comparing with a
690 comparing with a
663 query 1; heads
691 query 1; heads
664 searching for changes
692 searching for changes
665 taking quick initial sample
693 taking quick initial sample
666 searching: 2 queries
694 searching: 2 queries
667 query 2; still undecided: 31, sample size is: 31
695 query 2; still undecided: 31, sample size is: 31
668 2 total queries in *.????s (glob)
696 2 total queries in *.????s (glob)
697 elapsed time: * seconds (glob)
669 heads summary:
698 heads summary:
670 total common heads: 1
699 total common heads: 1
671 also local heads: 0
700 also local heads: 0
672 also remote heads: 0
701 also remote heads: 0
673 local heads: 1
702 local heads: 1
674 common: 0
703 common: 0
675 missing: 1
704 missing: 1
676 remote heads: 1
705 remote heads: 1
677 common: 0
706 common: 0
678 unknown: 1
707 unknown: 1
679 local changesets: 32
708 local changesets: 32
680 common: 2
709 common: 2
681 missing: 30
710 missing: 30
682 common heads: 66f7d451a68b
711 common heads: 66f7d451a68b
683
712
684 % -- b -> a set (tip only)
713 % -- b -> a set (tip only)
685 comparing with a
714 comparing with a
686 query 1; heads
715 query 1; heads
687 searching for changes
716 searching for changes
688 taking quick initial sample
717 taking quick initial sample
689 searching: 2 queries
718 searching: 2 queries
690 query 2; still undecided: 31, sample size is: 31
719 query 2; still undecided: 31, sample size is: 31
691 2 total queries in *.????s (glob)
720 2 total queries in *.????s (glob)
721 elapsed time: * seconds (glob)
692 heads summary:
722 heads summary:
693 total common heads: 1
723 total common heads: 1
694 also local heads: 0
724 also local heads: 0
695 also remote heads: 0
725 also remote heads: 0
696 local heads: 1
726 local heads: 1
697 common: 0
727 common: 0
698 missing: 1
728 missing: 1
699 remote heads: 1
729 remote heads: 1
700 common: 0
730 common: 0
701 unknown: 1
731 unknown: 1
702 local changesets: 32
732 local changesets: 32
703 common: 2
733 common: 2
704 missing: 30
734 missing: 30
705 common heads: 66f7d451a68b
735 common heads: 66f7d451a68b
706
736
707
737
708 Both many new on top of long history:
738 Both many new on top of long history:
709
739
710 $ testdesc '-ra' '-rb' '
740 $ testdesc '-ra' '-rb' '
711 > +1000:f +30 :b
741 > +1000:f +30 :b
712 > <f +50 :a'
742 > <f +50 :a'
713
743
714 % -- a -> b tree
744 % -- a -> b tree
715 comparing with b
745 comparing with b
716 searching for changes
746 searching for changes
717 unpruned common: 7ead0cba2838
747 unpruned common: 7ead0cba2838
748 elapsed time: * seconds (glob)
718 heads summary:
749 heads summary:
719 total common heads: 1
750 total common heads: 1
720 also local heads: 0
751 also local heads: 0
721 also remote heads: 0
752 also remote heads: 0
722 local heads: 1
753 local heads: 1
723 common: 0
754 common: 0
724 missing: 1
755 missing: 1
725 remote heads: 1
756 remote heads: 1
726 common: 0
757 common: 0
727 unknown: 1
758 unknown: 1
728 local changesets: 1050
759 local changesets: 1050
729 common: 1000
760 common: 1000
730 missing: 50
761 missing: 50
731 common heads: 7ead0cba2838
762 common heads: 7ead0cba2838
732
763
733 % -- a -> b set
764 % -- a -> b set
734 comparing with b
765 comparing with b
735 query 1; heads
766 query 1; heads
736 searching for changes
767 searching for changes
737 taking quick initial sample
768 taking quick initial sample
738 searching: 2 queries
769 searching: 2 queries
739 query 2; still undecided: 1049, sample size is: 11
770 query 2; still undecided: 1049, sample size is: 11
740 sampling from both directions
771 sampling from both directions
741 searching: 3 queries
772 searching: 3 queries
742 query 3; still undecided: 31, sample size is: 31
773 query 3; still undecided: 31, sample size is: 31
743 3 total queries in *.????s (glob)
774 3 total queries in *.????s (glob)
775 elapsed time: * seconds (glob)
744 heads summary:
776 heads summary:
745 total common heads: 1
777 total common heads: 1
746 also local heads: 0
778 also local heads: 0
747 also remote heads: 0
779 also remote heads: 0
748 local heads: 1
780 local heads: 1
749 common: 0
781 common: 0
750 missing: 1
782 missing: 1
751 remote heads: 1
783 remote heads: 1
752 common: 0
784 common: 0
753 unknown: 1
785 unknown: 1
754 local changesets: 1050
786 local changesets: 1050
755 common: 1000
787 common: 1000
756 missing: 50
788 missing: 50
757 common heads: 7ead0cba2838
789 common heads: 7ead0cba2838
758
790
759 % -- a -> b set (tip only)
791 % -- a -> b set (tip only)
760 comparing with b
792 comparing with b
761 query 1; heads
793 query 1; heads
762 searching for changes
794 searching for changes
763 taking quick initial sample
795 taking quick initial sample
764 searching: 2 queries
796 searching: 2 queries
765 query 2; still undecided: 1049, sample size is: 11
797 query 2; still undecided: 1049, sample size is: 11
766 sampling from both directions
798 sampling from both directions
767 searching: 3 queries
799 searching: 3 queries
768 query 3; still undecided: 31, sample size is: 31
800 query 3; still undecided: 31, sample size is: 31
769 3 total queries in *.????s (glob)
801 3 total queries in *.????s (glob)
802 elapsed time: * seconds (glob)
770 heads summary:
803 heads summary:
771 total common heads: 1
804 total common heads: 1
772 also local heads: 0
805 also local heads: 0
773 also remote heads: 0
806 also remote heads: 0
774 local heads: 1
807 local heads: 1
775 common: 0
808 common: 0
776 missing: 1
809 missing: 1
777 remote heads: 1
810 remote heads: 1
778 common: 0
811 common: 0
779 unknown: 1
812 unknown: 1
780 local changesets: 1050
813 local changesets: 1050
781 common: 1000
814 common: 1000
782 missing: 50
815 missing: 50
783 common heads: 7ead0cba2838
816 common heads: 7ead0cba2838
784
817
785 % -- b -> a tree
818 % -- b -> a tree
786 comparing with a
819 comparing with a
787 searching for changes
820 searching for changes
788 unpruned common: 7ead0cba2838
821 unpruned common: 7ead0cba2838
822 elapsed time: * seconds (glob)
789 heads summary:
823 heads summary:
790 total common heads: 1
824 total common heads: 1
791 also local heads: 0
825 also local heads: 0
792 also remote heads: 0
826 also remote heads: 0
793 local heads: 1
827 local heads: 1
794 common: 0
828 common: 0
795 missing: 1
829 missing: 1
796 remote heads: 1
830 remote heads: 1
797 common: 0
831 common: 0
798 unknown: 1
832 unknown: 1
799 local changesets: 1030
833 local changesets: 1030
800 common: 1000
834 common: 1000
801 missing: 30
835 missing: 30
802 common heads: 7ead0cba2838
836 common heads: 7ead0cba2838
803
837
804 % -- b -> a set
838 % -- b -> a set
805 comparing with a
839 comparing with a
806 query 1; heads
840 query 1; heads
807 searching for changes
841 searching for changes
808 taking quick initial sample
842 taking quick initial sample
809 searching: 2 queries
843 searching: 2 queries
810 query 2; still undecided: 1029, sample size is: 11
844 query 2; still undecided: 1029, sample size is: 11
811 sampling from both directions
845 sampling from both directions
812 searching: 3 queries
846 searching: 3 queries
813 query 3; still undecided: 15, sample size is: 15
847 query 3; still undecided: 15, sample size is: 15
814 3 total queries in *.????s (glob)
848 3 total queries in *.????s (glob)
849 elapsed time: * seconds (glob)
815 heads summary:
850 heads summary:
816 total common heads: 1
851 total common heads: 1
817 also local heads: 0
852 also local heads: 0
818 also remote heads: 0
853 also remote heads: 0
819 local heads: 1
854 local heads: 1
820 common: 0
855 common: 0
821 missing: 1
856 missing: 1
822 remote heads: 1
857 remote heads: 1
823 common: 0
858 common: 0
824 unknown: 1
859 unknown: 1
825 local changesets: 1030
860 local changesets: 1030
826 common: 1000
861 common: 1000
827 missing: 30
862 missing: 30
828 common heads: 7ead0cba2838
863 common heads: 7ead0cba2838
829
864
830 % -- b -> a set (tip only)
865 % -- b -> a set (tip only)
831 comparing with a
866 comparing with a
832 query 1; heads
867 query 1; heads
833 searching for changes
868 searching for changes
834 taking quick initial sample
869 taking quick initial sample
835 searching: 2 queries
870 searching: 2 queries
836 query 2; still undecided: 1029, sample size is: 11
871 query 2; still undecided: 1029, sample size is: 11
837 sampling from both directions
872 sampling from both directions
838 searching: 3 queries
873 searching: 3 queries
839 query 3; still undecided: 15, sample size is: 15
874 query 3; still undecided: 15, sample size is: 15
840 3 total queries in *.????s (glob)
875 3 total queries in *.????s (glob)
876 elapsed time: * seconds (glob)
841 heads summary:
877 heads summary:
842 total common heads: 1
878 total common heads: 1
843 also local heads: 0
879 also local heads: 0
844 also remote heads: 0
880 also remote heads: 0
845 local heads: 1
881 local heads: 1
846 common: 0
882 common: 0
847 missing: 1
883 missing: 1
848 remote heads: 1
884 remote heads: 1
849 common: 0
885 common: 0
850 unknown: 1
886 unknown: 1
851 local changesets: 1030
887 local changesets: 1030
852 common: 1000
888 common: 1000
853 missing: 30
889 missing: 30
854 common heads: 7ead0cba2838
890 common heads: 7ead0cba2838
855
891
856
892
857 One with >200 heads, which used to use up all of the sample:
893 One with >200 heads, which used to use up all of the sample:
858
894
859 $ hg init manyheads
895 $ hg init manyheads
860 $ cd manyheads
896 $ cd manyheads
861 $ echo "+300:r @a" >dagdesc
897 $ echo "+300:r @a" >dagdesc
862 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
898 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
863 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
899 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
864 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
900 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
865 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
901 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
866 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
902 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
867 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
903 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
868 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
904 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
869 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
905 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
870 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
906 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
871 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
907 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
872 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
908 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
873 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
909 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
874 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
910 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
875 $ echo "@b *r+3" >>dagdesc # one more head
911 $ echo "@b *r+3" >>dagdesc # one more head
876 $ hg debugbuilddag <dagdesc
912 $ hg debugbuilddag <dagdesc
877 reading DAG from stdin
913 reading DAG from stdin
878
914
879 $ hg heads -t --template . | wc -c
915 $ hg heads -t --template . | wc -c
880 \s*261 (re)
916 \s*261 (re)
881
917
882 $ hg clone -b a . a
918 $ hg clone -b a . a
883 adding changesets
919 adding changesets
884 adding manifests
920 adding manifests
885 adding file changes
921 adding file changes
886 added 1340 changesets with 0 changes to 0 files (+259 heads)
922 added 1340 changesets with 0 changes to 0 files (+259 heads)
887 new changesets 1ea73414a91b:1c51e2c80832
923 new changesets 1ea73414a91b:1c51e2c80832
888 updating to branch a
924 updating to branch a
889 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
925 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
890 $ hg clone -b b . b
926 $ hg clone -b b . b
891 adding changesets
927 adding changesets
892 adding manifests
928 adding manifests
893 adding file changes
929 adding file changes
894 added 304 changesets with 0 changes to 0 files
930 added 304 changesets with 0 changes to 0 files
895 new changesets 1ea73414a91b:513314ca8b3a
931 new changesets 1ea73414a91b:513314ca8b3a
896 updating to branch b
932 updating to branch b
897 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
933 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
898
934
899 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
935 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
900 comparing with b
936 comparing with b
901 query 1; heads
937 query 1; heads
902 searching for changes
938 searching for changes
903 taking quick initial sample
939 taking quick initial sample
904 searching: 2 queries
940 searching: 2 queries
905 query 2; still undecided: 1240, sample size is: 100
941 query 2; still undecided: 1240, sample size is: 100
906 sampling from both directions
942 sampling from both directions
907 searching: 3 queries
943 searching: 3 queries
908 query 3; still undecided: 1140, sample size is: 200
944 query 3; still undecided: 1140, sample size is: 200
909 sampling from both directions
945 sampling from both directions
910 searching: 4 queries
946 searching: 4 queries
911 query 4; still undecided: \d+, sample size is: 200 (re)
947 query 4; still undecided: \d+, sample size is: 200 (re)
912 sampling from both directions
948 sampling from both directions
913 searching: 5 queries
949 searching: 5 queries
914 query 5; still undecided: \d+, sample size is: 200 (re)
950 query 5; still undecided: \d+, sample size is: 200 (re)
915 sampling from both directions
951 sampling from both directions
916 searching: 6 queries
952 searching: 6 queries
917 query 6; still undecided: \d+, sample size is: \d+ (re)
953 query 6; still undecided: \d+, sample size is: \d+ (re)
918 6 total queries in *.????s (glob)
954 6 total queries in *.????s (glob)
955 elapsed time: * seconds (glob)
919 heads summary:
956 heads summary:
920 total common heads: 1
957 total common heads: 1
921 also local heads: 0
958 also local heads: 0
922 also remote heads: 0
959 also remote heads: 0
923 local heads: 260
960 local heads: 260
924 common: 0
961 common: 0
925 missing: 260
962 missing: 260
926 remote heads: 1
963 remote heads: 1
927 common: 0
964 common: 0
928 unknown: 1
965 unknown: 1
929 local changesets: 1340
966 local changesets: 1340
930 common: 300
967 common: 300
931 missing: 1040
968 missing: 1040
932 common heads: 3ee37d65064a
969 common heads: 3ee37d65064a
933 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
970 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
934 comparing with b
971 comparing with b
935 query 1; heads
972 query 1; heads
936 searching for changes
973 searching for changes
937 taking quick initial sample
974 taking quick initial sample
938 searching: 2 queries
975 searching: 2 queries
939 query 2; still undecided: 303, sample size is: 9
976 query 2; still undecided: 303, sample size is: 9
940 sampling from both directions
977 sampling from both directions
941 searching: 3 queries
978 searching: 3 queries
942 query 3; still undecided: 3, sample size is: 3
979 query 3; still undecided: 3, sample size is: 3
943 3 total queries in *.????s (glob)
980 3 total queries in *.????s (glob)
981 elapsed time: * seconds (glob)
944 heads summary:
982 heads summary:
945 total common heads: 1
983 total common heads: 1
946 also local heads: 0
984 also local heads: 0
947 also remote heads: 0
985 also remote heads: 0
948 local heads: 260
986 local heads: 260
949 common: 0
987 common: 0
950 missing: 260
988 missing: 260
951 remote heads: 1
989 remote heads: 1
952 common: 0
990 common: 0
953 unknown: 1
991 unknown: 1
954 local changesets: 1340
992 local changesets: 1340
955 common: 300
993 common: 300
956 missing: 1040
994 missing: 1040
957 common heads: 3ee37d65064a
995 common heads: 3ee37d65064a
958
996
959 Test actual protocol when pulling one new head in addition to common heads
997 Test actual protocol when pulling one new head in addition to common heads
960
998
961 $ hg clone -U b c
999 $ hg clone -U b c
962 $ hg -R c id -ir tip
1000 $ hg -R c id -ir tip
963 513314ca8b3a
1001 513314ca8b3a
964 $ hg -R c up -qr default
1002 $ hg -R c up -qr default
965 $ touch c/f
1003 $ touch c/f
966 $ hg -R c ci -Aqm "extra head"
1004 $ hg -R c ci -Aqm "extra head"
967 $ hg -R c id -i
1005 $ hg -R c id -i
968 e64a39e7da8b
1006 e64a39e7da8b
969
1007
970 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1008 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
971 $ cat hg.pid >> $DAEMON_PIDS
1009 $ cat hg.pid >> $DAEMON_PIDS
972
1010
973 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1011 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
974 comparing with http://localhost:$HGPORT/
1012 comparing with http://localhost:$HGPORT/
975 searching for changes
1013 searching for changes
976 e64a39e7da8b
1014 e64a39e7da8b
977
1015
978 $ killdaemons.py
1016 $ killdaemons.py
979 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1017 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
980 "GET /?cmd=capabilities HTTP/1.1" 200 -
1018 "GET /?cmd=capabilities HTTP/1.1" 200 -
981 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1019 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
982 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1020 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
983 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1021 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
984 $ cat errors.log
1022 $ cat errors.log
985
1023
986 $ cd ..
1024 $ cd ..
987
1025
988
1026
989 Issue 4438 - test coverage for 3ef893520a85 issues.
1027 Issue 4438 - test coverage for 3ef893520a85 issues.
990
1028
991 $ mkdir issue4438
1029 $ mkdir issue4438
992 $ cd issue4438
1030 $ cd issue4438
993 #if false
1031 #if false
994 generate new bundles:
1032 generate new bundles:
995 $ hg init r1
1033 $ hg init r1
996 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1034 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
997 $ hg clone -q r1 r2
1035 $ hg clone -q r1 r2
998 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1036 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
999 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1037 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1000 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1038 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1001 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1039 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1002 #else
1040 #else
1003 use existing bundles:
1041 use existing bundles:
1004 $ hg init r1
1042 $ hg init r1
1005 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1043 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1006 $ hg -R r1 -q up
1044 $ hg -R r1 -q up
1007 $ hg init r2
1045 $ hg init r2
1008 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1046 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1009 $ hg -R r2 -q up
1047 $ hg -R r2 -q up
1010 #endif
1048 #endif
1011
1049
1012 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1050 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1013
1051
1014 $ hg -R r1 outgoing r2 -T'{rev} '
1052 $ hg -R r1 outgoing r2 -T'{rev} '
1015 comparing with r2
1053 comparing with r2
1016 searching for changes
1054 searching for changes
1017 101 102 103 104 105 106 107 108 109 110 (no-eol)
1055 101 102 103 104 105 106 107 108 109 110 (no-eol)
1018
1056
1019 The case where all the 'initialsamplesize' samples already were common would
1057 The case where all the 'initialsamplesize' samples already were common would
1020 give 'all remote heads known locally' without checking the remaining heads -
1058 give 'all remote heads known locally' without checking the remaining heads -
1021 fixed in 86c35b7ae300:
1059 fixed in 86c35b7ae300:
1022
1060
1023 $ cat >> $TESTTMP/unrandomsample.py << EOF
1061 $ cat >> $TESTTMP/unrandomsample.py << EOF
1024 > import random
1062 > import random
1025 > def sample(population, k):
1063 > def sample(population, k):
1026 > return sorted(population)[:k]
1064 > return sorted(population)[:k]
1027 > random.sample = sample
1065 > random.sample = sample
1028 > EOF
1066 > EOF
1029
1067
1030 $ cat >> r1/.hg/hgrc << EOF
1068 $ cat >> r1/.hg/hgrc << EOF
1031 > [extensions]
1069 > [extensions]
1032 > unrandomsample = $TESTTMP/unrandomsample.py
1070 > unrandomsample = $TESTTMP/unrandomsample.py
1033 > EOF
1071 > EOF
1034
1072
1035 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1073 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1036 > --config blackbox.track='command commandfinish discovery'
1074 > --config blackbox.track='command commandfinish discovery'
1037 comparing with r2
1075 comparing with r2
1038 searching for changes
1076 searching for changes
1039 101 102 103 104 105 106 107 108 109 110 (no-eol)
1077 101 102 103 104 105 106 107 108 109 110 (no-eol)
1040 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1078 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1041 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1079 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1042 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1080 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1043 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
1081 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
1044 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1082 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1045 $ cd ..
1083 $ cd ..