##// END OF EJS Templates
debug-discovery: also gather details on tree-discovery queries type...
marmoute -
r50296:362c0026 stable
parent child Browse files
Show More
@@ -1,5034 +1,5038 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revlogutils,
76 revlogutils,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 constants as revlog_constants,
107 constants as revlog_constants,
108 debug as revlog_debug,
108 debug as revlog_debug,
109 deltas as deltautil,
109 deltas as deltautil,
110 nodemap,
110 nodemap,
111 rewrite,
111 rewrite,
112 sidedata,
112 sidedata,
113 )
113 )
114
114
115 release = lockmod.release
115 release = lockmod.release
116
116
117 table = {}
117 table = {}
118 table.update(strip.command._table)
118 table.update(strip.command._table)
119 command = registrar.command(table)
119 command = registrar.command(table)
120
120
121
121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit revlog index file was given on the command line;
        # resolve revisions against that standalone revlog.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
141
141
142
142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial; a native-str filename mixed
    # with a bytes mode is inconsistent with the rest of the file and breaks
    # bytes-only vfs path handling on Python 3.  Name it once so the write
    # and the cleanup cannot drift apart.
    fname = b'eicar-test-file.com'
    with repo.cachevfs.open(fname, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(fname))
158
158
159
159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, let the exchange layer sniff its format, then
    # replay it directly into the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166
166
167
167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo unless the caller explicitly
    # asked to continue from it.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (a first parse pass, used only to size the progress bar and the
    # initial mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # All commits happen inside a single transaction so a failure leaves
    # the repository unchanged.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev number of the last node committed
        atbranch = b'default'  # branch applied to subsequent nodes
        nodeids = []  # committed node ids, indexed by rev, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one commit with parents `ps`.
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" gets per-rev line edits; on merges the two
                    # parents' copies are three-way merged.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is fully rewritten by every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand new "nf<rev>" file per revision; merges also
                    # carry over the second parent's nf* files so they are
                    # not lost from the manifest.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve backrefs into concrete parent node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node `id`.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the branch used for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Local tags live outside history, in .hg/localtags.
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Render the contents of a parsed changegroup `gen`.  With `all`, every
    # delta chunk is printed with its full metadata; otherwise only the
    # changelog node hashes are listed.  `indent` prefixes each line so the
    # output nests under a bundle2 part listing.
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header, then one line per delta chunk.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # A changegroup stream is: changelog, manifest, then one group per
        # file, each introduced by a filelog header ({} terminates).
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # Terse mode: just the changelog node hashes.
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
392
393
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown encoding version: report it instead of aborting.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        # Render markers in sorted order through the standard formatter so
        # templated output works like `hg debugobsolete`.
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, marker)
        fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426
426
427
427
def _quasirepr(thing):
    """Return a bytes repr of `thing` with deterministic dict ordering."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        # Sort keys so output is stable regardless of insertion order.
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # Construct the unbundler unconditionally so the part's payload
            # is consumed even in quiet mode.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only the bundlespec was requested; print it and stop.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            # bundle2 has its own part-aware dumper.
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Fetch before printing anything, so a failing peer produces no
        # partial output.
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
503
503
504
504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute from the revision contents instead of trusting
        # what was recorded in changelog sidedata.
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Pick the most specific action category; first match wins,
            # falling back to the generic "touched".
            for category, label in (
                (files.added, b"added"),
                (files.removed, b"removed"),
                (files.merged, b"merged"),
                (files.salvaged, b"salvaged"),
            ):
                if f in category:
                    action = label
                    break
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            ui.write(
                b"%-8s %2s: %s, %s;\n" % (action, copy_parent, f, copy_source)
            )
554
554
555
555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Cross-check the dirstate against the manifests of both parents.
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    nerrors = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        nerrors += 1
    if nerrors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569
569
570
570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583
583
584
584
def _debugdisplaycolor(ui):
    """Print every known color/effect name, each rendered in itself."""
    # Work on a copy so the caller's ui styles are untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601
601
602
602
def _debugdisplaystyle(ui):
    """Print each configured style label with its rendered effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in a single column.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            padding = b' ' * (max(0, width - len(label)))
            ui.write(b': ')
            ui.write(padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616
616
617
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
639
639
640
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index given on the command line: emit its DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) node events; explicitly listed
            # revisions additionally get an 'l' (label) event as "rN".
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch so an 'a' (annotation) event is only
            # emitted when the branch changes between consecutive revisions.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagtextlines turns the event stream into the concise textual DAG form.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710
710
711
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision and no separate REV is accepted.
    storage_implied = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_implied:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the more permissive set of date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics from the revlog index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # Classify the delta base (see the docstring's deltatype table).
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without general delta, a revision is either a full text or a
            # delta against the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  p1  p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains by first-seen base so chainid is stable and compact.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Measure how much data a sparse read of this chain would touch.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989
989
990
990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # With a single positional argument it is the revision and the revlog is
    # selected through -c/-m; with two, the first names the revlog file.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # Fix: pass this command's registered name (not the stale, copy-pasted
    # b'debugdeltachain') so openrevlog error messages name the right command.
    revlog = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    # The debug hooks make the delta search print every candidate it tries.
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=True,
    )

    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    # Replay starts from the full revision text, as noted in the docstring.
    btext = [revlog.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048
1048
1049
1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata file instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates when explicitly given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort primarily on mtime, breaking ties by filename.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # Padded to the width of the strftime output below so columns
            # line up regardless of which branch produced timestr.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: show 'lnk' instead of an octal permission.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137
1137
1138
1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 stores an ignore-pattern hash; v1 prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is the trailing field of the tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153
1153
1154
1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to the real remote peer designated by `remoteurl`
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself as the "remote", hidden behind a
        # repoview filter that keeps only the requested revisions
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        # NOTE: registers a global filter; assumes a single debugdiscovery
        # run per process (debug command, so that is acceptable)
        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # same trick for the local side: filter out everything but the
        # requested revisions
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` doubles as the audit dictionary handed to the discovery
    # implementation; it gathers counters such as total-roundtrips and
    # total-queries that are displayed below.
    data = {}
    if opts.get(b'old'):
        # legacy tree-discovery protocol (branches/between commands)

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output (e.g. json): capture whatever the
        # discovery run writes and emit it as part of the formatted data

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    # the set of revisions the discovery could not classify from the
    # initial head exchange alone
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    # per-command query breakdown; presumably only recorded by the
    # tree-discovery (--old) code path — confirm against treediscovery
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1412
1416
1413
1417
_chunksize = 4 << 10  # 4 KiB read/write buffer used by debugdownload below
1415
1419
1416
1420
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at `url` is fetched through Mercurial's url handling
    (so the usual configuration applies) and copied in `_chunksize`
    pieces either to the file named by ``--output`` or, absent that,
    to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the url handle so the underlying connection is released
        # even on error (the original leaked it), then the output file
        try:
            fh.close()
        finally:
            if output:
                dest.close()
1439
1443
1440
1444
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter(b'debugextensions', opts)
    this_version = util.version()

    # walk the loaded extensions in a stable, name-sorted order
    for name, module in sorted(extensions.extensions(ui), key=lambda e: e[0]):
        is_internal = extensions.ismoduleinternal(module)

        # locate where the extension code was loaded from
        src_path = None
        if util.safehasattr(module, '__file__'):
            src_path = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            src_path = pycompat.sysexecutable

        if is_internal:
            tested_versions = []  # never expose magic string to users
        else:
            tested_versions = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        # in quiet/verbose mode the name stands on its own line; otherwise
        # a testing-status marker may be appended to the same line
        name_fmt = b'%s\n' if (ui.quiet or ui.verbose) else b'%s'
        fm.write(b'name', name_fmt, name)
        if is_internal or this_version in tested_versions:
            fm.plain(b'\n')
        elif not tested_versions:
            fm.plain(_(b' (untested!)\n'))
        else:
            fm.plain(b' (%s!)\n' % tested_versions[-1])

        fm.condwrite(
            ui.verbose and src_path,
            b'source',
            _(b' location: %s\n'),
            src_path or b"",
        )

        if ui.verbose:
            fm.plain(
                _(b' bundled: %s\n') % (b'yes' if is_internal else b'no')
            )
        fm.data(bundled=is_internal)

        fm.condwrite(
            ui.verbose and tested_versions,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(tested_versions, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1502
1506
1503
1507
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses ``expr`` through the fileset language pipeline (parse ->
    analyze -> optimize), optionally printing the tree after the stages
    named by --show-stage, then matches the expression against a set of
    candidate files and prints the ones that match.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the successive transformation stages of a fileset expression tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages should have their tree printed
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate every requested stage name before showing any of them
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted in the legacy --verbose-only
            # display of the parsed tree
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate files the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        # every file touched by any revision, plus subrepo entries
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory contents (also unknown/ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, so plain --verbose also triggers it
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1599
1603
1600
1604
1601 @command(
1605 @command(
1602 b"debug-repair-issue6528",
1606 b"debug-repair-issue6528",
1603 [
1607 [
1604 (
1608 (
1605 b'',
1609 b'',
1606 b'to-report',
1610 b'to-report',
1607 b'',
1611 b'',
1608 _(b'build a report of affected revisions to this file'),
1612 _(b'build a report of affected revisions to this file'),
1609 _(b'FILE'),
1613 _(b'FILE'),
1610 ),
1614 ),
1611 (
1615 (
1612 b'',
1616 b'',
1613 b'from-report',
1617 b'from-report',
1614 b'',
1618 b'',
1615 _(b'repair revisions listed in this report file'),
1619 _(b'repair revisions listed in this report file'),
1616 _(b'FILE'),
1620 _(b'FILE'),
1617 ),
1621 ),
1618 (
1622 (
1619 b'',
1623 b'',
1620 b'paranoid',
1624 b'paranoid',
1621 False,
1625 False,
1622 _(b'check that both detection methods do the same thing'),
1626 _(b'check that both detection methods do the same thing'),
1623 ),
1627 ),
1624 ]
1628 ]
1625 + cmdutil.dryrunopts,
1629 + cmdutil.dryrunopts,
1626 )
1630 )
1627 def debug_repair_issue6528(ui, repo, **opts):
1631 def debug_repair_issue6528(ui, repo, **opts):
1628 """find affected revisions and repair them. See issue6528 for more details.
1632 """find affected revisions and repair them. See issue6528 for more details.
1629
1633
1630 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1634 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1631 computation of affected revisions for a given repository across clones.
1635 computation of affected revisions for a given repository across clones.
1632 The report format is line-based (with empty lines ignored):
1636 The report format is line-based (with empty lines ignored):
1633
1637
1634 ```
1638 ```
1635 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1639 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1636 ```
1640 ```
1637
1641
1638 There can be multiple broken revisions per filelog, they are separated by
1642 There can be multiple broken revisions per filelog, they are separated by
1639 a comma with no spaces. The only space is between the revision(s) and the
1643 a comma with no spaces. The only space is between the revision(s) and the
1640 filename.
1644 filename.
1641
1645
1642 Note that this does *not* mean that this repairs future affected revisions,
1646 Note that this does *not* mean that this repairs future affected revisions,
1643 that needs a separate fix at the exchange level that was introduced in
1647 that needs a separate fix at the exchange level that was introduced in
1644 Mercurial 5.9.1.
1648 Mercurial 5.9.1.
1645
1649
1646 There is a `--paranoid` flag to test that the fast implementation is correct
1650 There is a `--paranoid` flag to test that the fast implementation is correct
1647 by checking it against the slow implementation. Since this matter is quite
1651 by checking it against the slow implementation. Since this matter is quite
1648 urgent and testing every edge-case is probably quite costly, we use this
1652 urgent and testing every edge-case is probably quite costly, we use this
1649 method to test on large repositories as a fuzzing method of sorts.
1653 method to test on large repositories as a fuzzing method of sorts.
1650 """
1654 """
1651 cmdutil.check_incompatible_arguments(
1655 cmdutil.check_incompatible_arguments(
1652 opts, 'to_report', ['from_report', 'dry_run']
1656 opts, 'to_report', ['from_report', 'dry_run']
1653 )
1657 )
1654 dry_run = opts.get('dry_run')
1658 dry_run = opts.get('dry_run')
1655 to_report = opts.get('to_report')
1659 to_report = opts.get('to_report')
1656 from_report = opts.get('from_report')
1660 from_report = opts.get('from_report')
1657 paranoid = opts.get('paranoid')
1661 paranoid = opts.get('paranoid')
1658 # TODO maybe add filelog pattern and revision pattern parameters to help
1662 # TODO maybe add filelog pattern and revision pattern parameters to help
1659 # narrow down the search for users that know what they're looking for?
1663 # narrow down the search for users that know what they're looking for?
1660
1664
1661 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1665 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1662 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1666 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1663 raise error.Abort(_(msg))
1667 raise error.Abort(_(msg))
1664
1668
1665 rewrite.repair_issue6528(
1669 rewrite.repair_issue6528(
1666 ui,
1670 ui,
1667 repo,
1671 repo,
1668 dry_run=dry_run,
1672 dry_run=dry_run,
1669 to_report=to_report,
1673 to_report=to_report,
1670 from_report=from_report,
1674 from_report=from_report,
1671 paranoid=paranoid,
1675 paranoid=paranoid,
1672 )
1676 )
1673
1677
1674
1678
1675 @command(b'debugformat', [] + cmdutil.formatteropts)
1679 @command(b'debugformat', [] + cmdutil.formatteropts)
1676 def debugformat(ui, repo, **opts):
1680 def debugformat(ui, repo, **opts):
1677 """display format information about the current repository
1681 """display format information about the current repository
1678
1682
1679 Use --verbose to get extra information about current config value and
1683 Use --verbose to get extra information about current config value and
1680 Mercurial default."""
1684 Mercurial default."""
1681 opts = pycompat.byteskwargs(opts)
1685 opts = pycompat.byteskwargs(opts)
1682 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1686 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1683 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1687 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1684
1688
1685 def makeformatname(name):
1689 def makeformatname(name):
1686 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1690 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1687
1691
1688 fm = ui.formatter(b'debugformat', opts)
1692 fm = ui.formatter(b'debugformat', opts)
1689 if fm.isplain():
1693 if fm.isplain():
1690
1694
1691 def formatvalue(value):
1695 def formatvalue(value):
1692 if util.safehasattr(value, b'startswith'):
1696 if util.safehasattr(value, b'startswith'):
1693 return value
1697 return value
1694 if value:
1698 if value:
1695 return b'yes'
1699 return b'yes'
1696 else:
1700 else:
1697 return b'no'
1701 return b'no'
1698
1702
1699 else:
1703 else:
1700 formatvalue = pycompat.identity
1704 formatvalue = pycompat.identity
1701
1705
1702 fm.plain(b'format-variant')
1706 fm.plain(b'format-variant')
1703 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1707 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1704 fm.plain(b' repo')
1708 fm.plain(b' repo')
1705 if ui.verbose:
1709 if ui.verbose:
1706 fm.plain(b' config default')
1710 fm.plain(b' config default')
1707 fm.plain(b'\n')
1711 fm.plain(b'\n')
1708 for fv in upgrade.allformatvariant:
1712 for fv in upgrade.allformatvariant:
1709 fm.startitem()
1713 fm.startitem()
1710 repovalue = fv.fromrepo(repo)
1714 repovalue = fv.fromrepo(repo)
1711 configvalue = fv.fromconfig(repo)
1715 configvalue = fv.fromconfig(repo)
1712
1716
1713 if repovalue != configvalue:
1717 if repovalue != configvalue:
1714 namelabel = b'formatvariant.name.mismatchconfig'
1718 namelabel = b'formatvariant.name.mismatchconfig'
1715 repolabel = b'formatvariant.repo.mismatchconfig'
1719 repolabel = b'formatvariant.repo.mismatchconfig'
1716 elif repovalue != fv.default:
1720 elif repovalue != fv.default:
1717 namelabel = b'formatvariant.name.mismatchdefault'
1721 namelabel = b'formatvariant.name.mismatchdefault'
1718 repolabel = b'formatvariant.repo.mismatchdefault'
1722 repolabel = b'formatvariant.repo.mismatchdefault'
1719 else:
1723 else:
1720 namelabel = b'formatvariant.name.uptodate'
1724 namelabel = b'formatvariant.name.uptodate'
1721 repolabel = b'formatvariant.repo.uptodate'
1725 repolabel = b'formatvariant.repo.uptodate'
1722
1726
1723 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1727 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1724 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1728 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1725 if fv.default != configvalue:
1729 if fv.default != configvalue:
1726 configlabel = b'formatvariant.config.special'
1730 configlabel = b'formatvariant.config.special'
1727 else:
1731 else:
1728 configlabel = b'formatvariant.config.default'
1732 configlabel = b'formatvariant.config.default'
1729 fm.condwrite(
1733 fm.condwrite(
1730 ui.verbose,
1734 ui.verbose,
1731 b'config',
1735 b'config',
1732 b' %6s',
1736 b' %6s',
1733 formatvalue(configvalue),
1737 formatvalue(configvalue),
1734 label=configlabel,
1738 label=configlabel,
1735 )
1739 )
1736 fm.condwrite(
1740 fm.condwrite(
1737 ui.verbose,
1741 ui.verbose,
1738 b'default',
1742 b'default',
1739 b' %7s',
1743 b' %7s',
1740 formatvalue(fv.default),
1744 formatvalue(fv.default),
1741 label=b'formatvariant.default',
1745 label=b'formatvariant.default',
1742 )
1746 )
1743 fm.plain(b'\n')
1747 fm.plain(b'\n')
1744 fm.end()
1748 fm.end()
1745
1749
1746
1750
1747 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1751 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1748 def debugfsinfo(ui, path=b"."):
1752 def debugfsinfo(ui, path=b"."):
1749 """show information detected about current filesystem"""
1753 """show information detected about current filesystem"""
1750 ui.writenoi18n(b'path: %s\n' % path)
1754 ui.writenoi18n(b'path: %s\n' % path)
1751 ui.writenoi18n(
1755 ui.writenoi18n(
1752 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1756 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1753 )
1757 )
1754 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1758 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1755 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1759 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1756 ui.writenoi18n(
1760 ui.writenoi18n(
1757 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1761 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1758 )
1762 )
1759 ui.writenoi18n(
1763 ui.writenoi18n(
1760 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1764 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1761 )
1765 )
1762 casesensitive = b'(unknown)'
1766 casesensitive = b'(unknown)'
1763 try:
1767 try:
1764 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1768 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1765 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1769 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1766 except OSError:
1770 except OSError:
1767 pass
1771 pass
1768 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1772 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1769
1773
1770
1774
1771 @command(
1775 @command(
1772 b'debuggetbundle',
1776 b'debuggetbundle',
1773 [
1777 [
1774 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1778 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1775 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1779 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1776 (
1780 (
1777 b't',
1781 b't',
1778 b'type',
1782 b'type',
1779 b'bzip2',
1783 b'bzip2',
1780 _(b'bundle compression type to use'),
1784 _(b'bundle compression type to use'),
1781 _(b'TYPE'),
1785 _(b'TYPE'),
1782 ),
1786 ),
1783 ],
1787 ],
1784 _(b'REPO FILE [-H|-C ID]...'),
1788 _(b'REPO FILE [-H|-C ID]...'),
1785 norepo=True,
1789 norepo=True,
1786 )
1790 )
1787 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1791 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1788 """retrieves a bundle from a repo
1792 """retrieves a bundle from a repo
1789
1793
1790 Every ID must be a full-length hex node id string. Saves the bundle to the
1794 Every ID must be a full-length hex node id string. Saves the bundle to the
1791 given file.
1795 given file.
1792 """
1796 """
1793 opts = pycompat.byteskwargs(opts)
1797 opts = pycompat.byteskwargs(opts)
1794 repo = hg.peer(ui, opts, repopath)
1798 repo = hg.peer(ui, opts, repopath)
1795 if not repo.capable(b'getbundle'):
1799 if not repo.capable(b'getbundle'):
1796 raise error.Abort(b"getbundle() not supported by target repository")
1800 raise error.Abort(b"getbundle() not supported by target repository")
1797 args = {}
1801 args = {}
1798 if common:
1802 if common:
1799 args['common'] = [bin(s) for s in common]
1803 args['common'] = [bin(s) for s in common]
1800 if head:
1804 if head:
1801 args['heads'] = [bin(s) for s in head]
1805 args['heads'] = [bin(s) for s in head]
1802 # TODO: get desired bundlecaps from command line.
1806 # TODO: get desired bundlecaps from command line.
1803 args['bundlecaps'] = None
1807 args['bundlecaps'] = None
1804 bundle = repo.getbundle(b'debug', **args)
1808 bundle = repo.getbundle(b'debug', **args)
1805
1809
1806 bundletype = opts.get(b'type', b'bzip2').lower()
1810 bundletype = opts.get(b'type', b'bzip2').lower()
1807 btypes = {
1811 btypes = {
1808 b'none': b'HG10UN',
1812 b'none': b'HG10UN',
1809 b'bzip2': b'HG10BZ',
1813 b'bzip2': b'HG10BZ',
1810 b'gzip': b'HG10GZ',
1814 b'gzip': b'HG10GZ',
1811 b'bundle2': b'HG20',
1815 b'bundle2': b'HG20',
1812 }
1816 }
1813 bundletype = btypes.get(bundletype)
1817 bundletype = btypes.get(bundletype)
1814 if bundletype not in bundle2.bundletypes:
1818 if bundletype not in bundle2.bundletypes:
1815 raise error.Abort(_(b'unknown bundle type specified with --type'))
1819 raise error.Abort(_(b'unknown bundle type specified with --type'))
1816 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1820 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1817
1821
1818
1822
1819 @command(b'debugignore', [], b'[FILE]')
1823 @command(b'debugignore', [], b'[FILE]')
1820 def debugignore(ui, repo, *files, **opts):
1824 def debugignore(ui, repo, *files, **opts):
1821 """display the combined ignore pattern and information about ignored files
1825 """display the combined ignore pattern and information about ignored files
1822
1826
1823 With no argument display the combined ignore pattern.
1827 With no argument display the combined ignore pattern.
1824
1828
1825 Given space separated file names, shows if the given file is ignored and
1829 Given space separated file names, shows if the given file is ignored and
1826 if so, show the ignore rule (file and line number) that matched it.
1830 if so, show the ignore rule (file and line number) that matched it.
1827 """
1831 """
1828 ignore = repo.dirstate._ignore
1832 ignore = repo.dirstate._ignore
1829 if not files:
1833 if not files:
1830 # Show all the patterns
1834 # Show all the patterns
1831 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1835 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1832 else:
1836 else:
1833 m = scmutil.match(repo[None], pats=files)
1837 m = scmutil.match(repo[None], pats=files)
1834 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1838 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1835 for f in m.files():
1839 for f in m.files():
1836 nf = util.normpath(f)
1840 nf = util.normpath(f)
1837 ignored = None
1841 ignored = None
1838 ignoredata = None
1842 ignoredata = None
1839 if nf != b'.':
1843 if nf != b'.':
1840 if ignore(nf):
1844 if ignore(nf):
1841 ignored = nf
1845 ignored = nf
1842 ignoredata = repo.dirstate._ignorefileandline(nf)
1846 ignoredata = repo.dirstate._ignorefileandline(nf)
1843 else:
1847 else:
1844 for p in pathutil.finddirs(nf):
1848 for p in pathutil.finddirs(nf):
1845 if ignore(p):
1849 if ignore(p):
1846 ignored = p
1850 ignored = p
1847 ignoredata = repo.dirstate._ignorefileandline(p)
1851 ignoredata = repo.dirstate._ignorefileandline(p)
1848 break
1852 break
1849 if ignored:
1853 if ignored:
1850 if ignored == nf:
1854 if ignored == nf:
1851 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1855 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1852 else:
1856 else:
1853 ui.write(
1857 ui.write(
1854 _(
1858 _(
1855 b"%s is ignored because of "
1859 b"%s is ignored because of "
1856 b"containing directory %s\n"
1860 b"containing directory %s\n"
1857 )
1861 )
1858 % (uipathfn(f), ignored)
1862 % (uipathfn(f), ignored)
1859 )
1863 )
1860 ignorefile, lineno, line = ignoredata
1864 ignorefile, lineno, line = ignoredata
1861 ui.write(
1865 ui.write(
1862 _(b"(ignore rule in %s, line %d: '%s')\n")
1866 _(b"(ignore rule in %s, line %d: '%s')\n")
1863 % (ignorefile, lineno, line)
1867 % (ignorefile, lineno, line)
1864 )
1868 )
1865 else:
1869 else:
1866 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1870 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1867
1871
1868
1872
1869 @command(
1873 @command(
1870 b'debug-revlog-index|debugindex',
1874 b'debug-revlog-index|debugindex',
1871 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1875 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1872 _(b'-c|-m|FILE'),
1876 _(b'-c|-m|FILE'),
1873 )
1877 )
1874 def debugindex(ui, repo, file_=None, **opts):
1878 def debugindex(ui, repo, file_=None, **opts):
1875 """dump index data for a revlog"""
1879 """dump index data for a revlog"""
1876 opts = pycompat.byteskwargs(opts)
1880 opts = pycompat.byteskwargs(opts)
1877 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1881 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1878
1882
1879 fm = ui.formatter(b'debugindex', opts)
1883 fm = ui.formatter(b'debugindex', opts)
1880
1884
1881 revlog = getattr(store, b'_revlog', store)
1885 revlog = getattr(store, b'_revlog', store)
1882
1886
1883 return revlog_debug.debug_index(
1887 return revlog_debug.debug_index(
1884 ui,
1888 ui,
1885 repo,
1889 repo,
1886 formatter=fm,
1890 formatter=fm,
1887 revlog=revlog,
1891 revlog=revlog,
1888 full_node=ui.debugflag,
1892 full_node=ui.debugflag,
1889 )
1893 )
1890
1894
1891
1895
1892 @command(
1896 @command(
1893 b'debugindexdot',
1897 b'debugindexdot',
1894 cmdutil.debugrevlogopts,
1898 cmdutil.debugrevlogopts,
1895 _(b'-c|-m|FILE'),
1899 _(b'-c|-m|FILE'),
1896 optionalrepo=True,
1900 optionalrepo=True,
1897 )
1901 )
1898 def debugindexdot(ui, repo, file_=None, **opts):
1902 def debugindexdot(ui, repo, file_=None, **opts):
1899 """dump an index DAG as a graphviz dot file"""
1903 """dump an index DAG as a graphviz dot file"""
1900 opts = pycompat.byteskwargs(opts)
1904 opts = pycompat.byteskwargs(opts)
1901 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1905 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1902 ui.writenoi18n(b"digraph G {\n")
1906 ui.writenoi18n(b"digraph G {\n")
1903 for i in r:
1907 for i in r:
1904 node = r.node(i)
1908 node = r.node(i)
1905 pp = r.parents(node)
1909 pp = r.parents(node)
1906 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1910 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1907 if pp[1] != repo.nullid:
1911 if pp[1] != repo.nullid:
1908 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1912 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1909 ui.write(b"}\n")
1913 ui.write(b"}\n")
1910
1914
1911
1915
1912 @command(b'debugindexstats', [])
1916 @command(b'debugindexstats', [])
1913 def debugindexstats(ui, repo):
1917 def debugindexstats(ui, repo):
1914 """show stats related to the changelog index"""
1918 """show stats related to the changelog index"""
1915 repo.changelog.shortest(repo.nullid, 1)
1919 repo.changelog.shortest(repo.nullid, 1)
1916 index = repo.changelog.index
1920 index = repo.changelog.index
1917 if not util.safehasattr(index, b'stats'):
1921 if not util.safehasattr(index, b'stats'):
1918 raise error.Abort(_(b'debugindexstats only works with native code'))
1922 raise error.Abort(_(b'debugindexstats only works with native code'))
1919 for k, v in sorted(index.stats().items()):
1923 for k, v in sorted(index.stats().items()):
1920 ui.write(b'%s: %d\n' % (k, v))
1924 ui.write(b'%s: %d\n' % (k, v))
1921
1925
1922
1926
1923 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1927 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1924 def debuginstall(ui, **opts):
1928 def debuginstall(ui, **opts):
1925 """test Mercurial installation
1929 """test Mercurial installation
1926
1930
1927 Returns 0 on success.
1931 Returns 0 on success.
1928 """
1932 """
1929 opts = pycompat.byteskwargs(opts)
1933 opts = pycompat.byteskwargs(opts)
1930
1934
1931 problems = 0
1935 problems = 0
1932
1936
1933 fm = ui.formatter(b'debuginstall', opts)
1937 fm = ui.formatter(b'debuginstall', opts)
1934 fm.startitem()
1938 fm.startitem()
1935
1939
1936 # encoding might be unknown or wrong. don't translate these messages.
1940 # encoding might be unknown or wrong. don't translate these messages.
1937 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1941 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1938 err = None
1942 err = None
1939 try:
1943 try:
1940 codecs.lookup(pycompat.sysstr(encoding.encoding))
1944 codecs.lookup(pycompat.sysstr(encoding.encoding))
1941 except LookupError as inst:
1945 except LookupError as inst:
1942 err = stringutil.forcebytestr(inst)
1946 err = stringutil.forcebytestr(inst)
1943 problems += 1
1947 problems += 1
1944 fm.condwrite(
1948 fm.condwrite(
1945 err,
1949 err,
1946 b'encodingerror',
1950 b'encodingerror',
1947 b" %s\n (check that your locale is properly set)\n",
1951 b" %s\n (check that your locale is properly set)\n",
1948 err,
1952 err,
1949 )
1953 )
1950
1954
1951 # Python
1955 # Python
1952 pythonlib = None
1956 pythonlib = None
1953 if util.safehasattr(os, '__file__'):
1957 if util.safehasattr(os, '__file__'):
1954 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1958 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1955 elif getattr(sys, 'oxidized', False):
1959 elif getattr(sys, 'oxidized', False):
1956 pythonlib = pycompat.sysexecutable
1960 pythonlib = pycompat.sysexecutable
1957
1961
1958 fm.write(
1962 fm.write(
1959 b'pythonexe',
1963 b'pythonexe',
1960 _(b"checking Python executable (%s)\n"),
1964 _(b"checking Python executable (%s)\n"),
1961 pycompat.sysexecutable or _(b"unknown"),
1965 pycompat.sysexecutable or _(b"unknown"),
1962 )
1966 )
1963 fm.write(
1967 fm.write(
1964 b'pythonimplementation',
1968 b'pythonimplementation',
1965 _(b"checking Python implementation (%s)\n"),
1969 _(b"checking Python implementation (%s)\n"),
1966 pycompat.sysbytes(platform.python_implementation()),
1970 pycompat.sysbytes(platform.python_implementation()),
1967 )
1971 )
1968 fm.write(
1972 fm.write(
1969 b'pythonver',
1973 b'pythonver',
1970 _(b"checking Python version (%s)\n"),
1974 _(b"checking Python version (%s)\n"),
1971 (b"%d.%d.%d" % sys.version_info[:3]),
1975 (b"%d.%d.%d" % sys.version_info[:3]),
1972 )
1976 )
1973 fm.write(
1977 fm.write(
1974 b'pythonlib',
1978 b'pythonlib',
1975 _(b"checking Python lib (%s)...\n"),
1979 _(b"checking Python lib (%s)...\n"),
1976 pythonlib or _(b"unknown"),
1980 pythonlib or _(b"unknown"),
1977 )
1981 )
1978
1982
1979 try:
1983 try:
1980 from . import rustext # pytype: disable=import-error
1984 from . import rustext # pytype: disable=import-error
1981
1985
1982 rustext.__doc__ # trigger lazy import
1986 rustext.__doc__ # trigger lazy import
1983 except ImportError:
1987 except ImportError:
1984 rustext = None
1988 rustext = None
1985
1989
1986 security = set(sslutil.supportedprotocols)
1990 security = set(sslutil.supportedprotocols)
1987 if sslutil.hassni:
1991 if sslutil.hassni:
1988 security.add(b'sni')
1992 security.add(b'sni')
1989
1993
1990 fm.write(
1994 fm.write(
1991 b'pythonsecurity',
1995 b'pythonsecurity',
1992 _(b"checking Python security support (%s)\n"),
1996 _(b"checking Python security support (%s)\n"),
1993 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1997 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1994 )
1998 )
1995
1999
1996 # These are warnings, not errors. So don't increment problem count. This
2000 # These are warnings, not errors. So don't increment problem count. This
1997 # may change in the future.
2001 # may change in the future.
1998 if b'tls1.2' not in security:
2002 if b'tls1.2' not in security:
1999 fm.plain(
2003 fm.plain(
2000 _(
2004 _(
2001 b' TLS 1.2 not supported by Python install; '
2005 b' TLS 1.2 not supported by Python install; '
2002 b'network connections lack modern security\n'
2006 b'network connections lack modern security\n'
2003 )
2007 )
2004 )
2008 )
2005 if b'sni' not in security:
2009 if b'sni' not in security:
2006 fm.plain(
2010 fm.plain(
2007 _(
2011 _(
2008 b' SNI not supported by Python install; may have '
2012 b' SNI not supported by Python install; may have '
2009 b'connectivity issues with some servers\n'
2013 b'connectivity issues with some servers\n'
2010 )
2014 )
2011 )
2015 )
2012
2016
2013 fm.plain(
2017 fm.plain(
2014 _(
2018 _(
2015 b"checking Rust extensions (%s)\n"
2019 b"checking Rust extensions (%s)\n"
2016 % (b'missing' if rustext is None else b'installed')
2020 % (b'missing' if rustext is None else b'installed')
2017 ),
2021 ),
2018 )
2022 )
2019
2023
2020 # TODO print CA cert info
2024 # TODO print CA cert info
2021
2025
2022 # hg version
2026 # hg version
2023 hgver = util.version()
2027 hgver = util.version()
2024 fm.write(
2028 fm.write(
2025 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2029 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2026 )
2030 )
2027 fm.write(
2031 fm.write(
2028 b'hgverextra',
2032 b'hgverextra',
2029 _(b"checking Mercurial custom build (%s)\n"),
2033 _(b"checking Mercurial custom build (%s)\n"),
2030 b'+'.join(hgver.split(b'+')[1:]),
2034 b'+'.join(hgver.split(b'+')[1:]),
2031 )
2035 )
2032
2036
2033 # compiled modules
2037 # compiled modules
2034 hgmodules = None
2038 hgmodules = None
2035 if util.safehasattr(sys.modules[__name__], '__file__'):
2039 if util.safehasattr(sys.modules[__name__], '__file__'):
2036 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2040 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2037 elif getattr(sys, 'oxidized', False):
2041 elif getattr(sys, 'oxidized', False):
2038 hgmodules = pycompat.sysexecutable
2042 hgmodules = pycompat.sysexecutable
2039
2043
2040 fm.write(
2044 fm.write(
2041 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2045 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2042 )
2046 )
2043 fm.write(
2047 fm.write(
2044 b'hgmodules',
2048 b'hgmodules',
2045 _(b"checking installed modules (%s)...\n"),
2049 _(b"checking installed modules (%s)...\n"),
2046 hgmodules or _(b"unknown"),
2050 hgmodules or _(b"unknown"),
2047 )
2051 )
2048
2052
2049 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2053 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2050 rustext = rustandc # for now, that's the only case
2054 rustext = rustandc # for now, that's the only case
2051 cext = policy.policy in (b'c', b'allow') or rustandc
2055 cext = policy.policy in (b'c', b'allow') or rustandc
2052 nopure = cext or rustext
2056 nopure = cext or rustext
2053 if nopure:
2057 if nopure:
2054 err = None
2058 err = None
2055 try:
2059 try:
2056 if cext:
2060 if cext:
2057 from .cext import ( # pytype: disable=import-error
2061 from .cext import ( # pytype: disable=import-error
2058 base85,
2062 base85,
2059 bdiff,
2063 bdiff,
2060 mpatch,
2064 mpatch,
2061 osutil,
2065 osutil,
2062 )
2066 )
2063
2067
2064 # quiet pyflakes
2068 # quiet pyflakes
2065 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2069 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2066 if rustext:
2070 if rustext:
2067 from .rustext import ( # pytype: disable=import-error
2071 from .rustext import ( # pytype: disable=import-error
2068 ancestor,
2072 ancestor,
2069 dirstate,
2073 dirstate,
2070 )
2074 )
2071
2075
2072 dir(ancestor), dir(dirstate) # quiet pyflakes
2076 dir(ancestor), dir(dirstate) # quiet pyflakes
2073 except Exception as inst:
2077 except Exception as inst:
2074 err = stringutil.forcebytestr(inst)
2078 err = stringutil.forcebytestr(inst)
2075 problems += 1
2079 problems += 1
2076 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2080 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2077
2081
2078 compengines = util.compengines._engines.values()
2082 compengines = util.compengines._engines.values()
2079 fm.write(
2083 fm.write(
2080 b'compengines',
2084 b'compengines',
2081 _(b'checking registered compression engines (%s)\n'),
2085 _(b'checking registered compression engines (%s)\n'),
2082 fm.formatlist(
2086 fm.formatlist(
2083 sorted(e.name() for e in compengines),
2087 sorted(e.name() for e in compengines),
2084 name=b'compengine',
2088 name=b'compengine',
2085 fmt=b'%s',
2089 fmt=b'%s',
2086 sep=b', ',
2090 sep=b', ',
2087 ),
2091 ),
2088 )
2092 )
2089 fm.write(
2093 fm.write(
2090 b'compenginesavail',
2094 b'compenginesavail',
2091 _(b'checking available compression engines (%s)\n'),
2095 _(b'checking available compression engines (%s)\n'),
2092 fm.formatlist(
2096 fm.formatlist(
2093 sorted(e.name() for e in compengines if e.available()),
2097 sorted(e.name() for e in compengines if e.available()),
2094 name=b'compengine',
2098 name=b'compengine',
2095 fmt=b'%s',
2099 fmt=b'%s',
2096 sep=b', ',
2100 sep=b', ',
2097 ),
2101 ),
2098 )
2102 )
2099 wirecompengines = compression.compengines.supportedwireengines(
2103 wirecompengines = compression.compengines.supportedwireengines(
2100 compression.SERVERROLE
2104 compression.SERVERROLE
2101 )
2105 )
2102 fm.write(
2106 fm.write(
2103 b'compenginesserver',
2107 b'compenginesserver',
2104 _(
2108 _(
2105 b'checking available compression engines '
2109 b'checking available compression engines '
2106 b'for wire protocol (%s)\n'
2110 b'for wire protocol (%s)\n'
2107 ),
2111 ),
2108 fm.formatlist(
2112 fm.formatlist(
2109 [e.name() for e in wirecompengines if e.wireprotosupport()],
2113 [e.name() for e in wirecompengines if e.wireprotosupport()],
2110 name=b'compengine',
2114 name=b'compengine',
2111 fmt=b'%s',
2115 fmt=b'%s',
2112 sep=b', ',
2116 sep=b', ',
2113 ),
2117 ),
2114 )
2118 )
2115 re2 = b'missing'
2119 re2 = b'missing'
2116 if util._re2:
2120 if util._re2:
2117 re2 = b'available'
2121 re2 = b'available'
2118 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2122 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2119 fm.data(re2=bool(util._re2))
2123 fm.data(re2=bool(util._re2))
2120
2124
2121 # templates
2125 # templates
2122 p = templater.templatedir()
2126 p = templater.templatedir()
2123 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2127 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2124 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2128 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2125 if p:
2129 if p:
2126 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2130 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2127 if m:
2131 if m:
2128 # template found, check if it is working
2132 # template found, check if it is working
2129 err = None
2133 err = None
2130 try:
2134 try:
2131 templater.templater.frommapfile(m)
2135 templater.templater.frommapfile(m)
2132 except Exception as inst:
2136 except Exception as inst:
2133 err = stringutil.forcebytestr(inst)
2137 err = stringutil.forcebytestr(inst)
2134 p = None
2138 p = None
2135 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2139 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2136 else:
2140 else:
2137 p = None
2141 p = None
2138 fm.condwrite(
2142 fm.condwrite(
2139 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2143 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2140 )
2144 )
2141 fm.condwrite(
2145 fm.condwrite(
2142 not m,
2146 not m,
2143 b'defaulttemplatenotfound',
2147 b'defaulttemplatenotfound',
2144 _(b" template '%s' not found\n"),
2148 _(b" template '%s' not found\n"),
2145 b"default",
2149 b"default",
2146 )
2150 )
2147 if not p:
2151 if not p:
2148 problems += 1
2152 problems += 1
2149 fm.condwrite(
2153 fm.condwrite(
2150 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2154 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2151 )
2155 )
2152
2156
2153 # editor
2157 # editor
2154 editor = ui.geteditor()
2158 editor = ui.geteditor()
2155 editor = util.expandpath(editor)
2159 editor = util.expandpath(editor)
2156 editorbin = procutil.shellsplit(editor)[0]
2160 editorbin = procutil.shellsplit(editor)[0]
2157 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2161 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2158 cmdpath = procutil.findexe(editorbin)
2162 cmdpath = procutil.findexe(editorbin)
2159 fm.condwrite(
2163 fm.condwrite(
2160 not cmdpath and editor == b'vi',
2164 not cmdpath and editor == b'vi',
2161 b'vinotfound',
2165 b'vinotfound',
2162 _(
2166 _(
2163 b" No commit editor set and can't find %s in PATH\n"
2167 b" No commit editor set and can't find %s in PATH\n"
2164 b" (specify a commit editor in your configuration"
2168 b" (specify a commit editor in your configuration"
2165 b" file)\n"
2169 b" file)\n"
2166 ),
2170 ),
2167 not cmdpath and editor == b'vi' and editorbin,
2171 not cmdpath and editor == b'vi' and editorbin,
2168 )
2172 )
2169 fm.condwrite(
2173 fm.condwrite(
2170 not cmdpath and editor != b'vi',
2174 not cmdpath and editor != b'vi',
2171 b'editornotfound',
2175 b'editornotfound',
2172 _(
2176 _(
2173 b" Can't find editor '%s' in PATH\n"
2177 b" Can't find editor '%s' in PATH\n"
2174 b" (specify a commit editor in your configuration"
2178 b" (specify a commit editor in your configuration"
2175 b" file)\n"
2179 b" file)\n"
2176 ),
2180 ),
2177 not cmdpath and editorbin,
2181 not cmdpath and editorbin,
2178 )
2182 )
2179 if not cmdpath and editor != b'vi':
2183 if not cmdpath and editor != b'vi':
2180 problems += 1
2184 problems += 1
2181
2185
2182 # check username
2186 # check username
2183 username = None
2187 username = None
2184 err = None
2188 err = None
2185 try:
2189 try:
2186 username = ui.username()
2190 username = ui.username()
2187 except error.Abort as e:
2191 except error.Abort as e:
2188 err = e.message
2192 err = e.message
2189 problems += 1
2193 problems += 1
2190
2194
2191 fm.condwrite(
2195 fm.condwrite(
2192 username, b'username', _(b"checking username (%s)\n"), username
2196 username, b'username', _(b"checking username (%s)\n"), username
2193 )
2197 )
2194 fm.condwrite(
2198 fm.condwrite(
2195 err,
2199 err,
2196 b'usernameerror',
2200 b'usernameerror',
2197 _(
2201 _(
2198 b"checking username...\n %s\n"
2202 b"checking username...\n %s\n"
2199 b" (specify a username in your configuration file)\n"
2203 b" (specify a username in your configuration file)\n"
2200 ),
2204 ),
2201 err,
2205 err,
2202 )
2206 )
2203
2207
2204 for name, mod in extensions.extensions():
2208 for name, mod in extensions.extensions():
2205 handler = getattr(mod, 'debuginstall', None)
2209 handler = getattr(mod, 'debuginstall', None)
2206 if handler is not None:
2210 if handler is not None:
2207 problems += handler(ui, fm)
2211 problems += handler(ui, fm)
2208
2212
2209 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2213 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2210 if not problems:
2214 if not problems:
2211 fm.data(problems=problems)
2215 fm.data(problems=problems)
2212 fm.condwrite(
2216 fm.condwrite(
2213 problems,
2217 problems,
2214 b'problems',
2218 b'problems',
2215 _(b"%d problems detected, please check your install!\n"),
2219 _(b"%d problems detected, please check your install!\n"),
2216 problems,
2220 problems,
2217 )
2221 )
2218 fm.end()
2222 fm.end()
2219
2223
2220 return problems
2224 return problems
2221
2225
2222
2226
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open a peer so this also works against remote repositories; the
    # query goes through the 'known' wire-protocol capability.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    results = peer.known([bin(node_id) for node_id in ids])
    digits = [b"1" if known else b"0" for known in results]
    ui.write(b"%s\n" % b"".join(digits))
2236
2240
2237
2241
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias for old completion scripts; the real work
    # happens in debugnamecomplete below.
    debugnamecomplete(ui, repo, *args)
2242
2246
2243
2247
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced-free mode: unconditionally remove the lock files and stop.
    # tryunlink ignores a missing file, so this is safe when no lock exists.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # Set mode: acquire the requested lock(s) non-blockingly and hold
        # them until the user answers the prompt or a signal arrives.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Whatever happened above, never leave a lock behind.
        release(*locks)

    # Report mode (no options): show who holds each lock, if anyone.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could acquire it, so nobody held it; release immediately.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # Lock contents are "host:pid"; only show the host when the
                # lock was taken on a different machine.
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock vanished between the failed acquire and the stat:
                # treat it as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # Return value doubles as the exit code: number of held locks.
    return held
2366
2370
2367
2371
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog-based
        # manifest storage; other storage backends may not have one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # wlock because clearing also removes the persisted on-disk data.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # Default mode: dump the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2441
2445
2442
2446
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mirroring the historical plain-text output; users
        # can override it via -T to get machine-readable output instead.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged, with their conflict-marker labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file records; the meaning of the state tuple depends on the
    # record type (content conflict vs. path/rename conflict).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that are no longer part of the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2550
2554
2551
2555
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names are handled separately below: the branchmap lets us
    # restrict the completion list to open branches only.
    for ns_name, ns in repo.names.items():
        if ns_name == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    prefixes = args if args else [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2574
2578
2575
2579
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the (rust) index's native serializer when available,
        # otherwise fall back to the pure-python implementation.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): divides by docket.data_length — presumably a
            # persisted nodemap always has non-zero length; confirm.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2637
2641
2638
2642
2639 @command(
2643 @command(
2640 b'debugobsolete',
2644 b'debugobsolete',
2641 [
2645 [
2642 (b'', b'flags', 0, _(b'markers flag')),
2646 (b'', b'flags', 0, _(b'markers flag')),
2643 (
2647 (
2644 b'',
2648 b'',
2645 b'record-parents',
2649 b'record-parents',
2646 False,
2650 False,
2647 _(b'record parent information for the precursor'),
2651 _(b'record parent information for the precursor'),
2648 ),
2652 ),
2649 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2653 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2650 (
2654 (
2651 b'',
2655 b'',
2652 b'exclusive',
2656 b'exclusive',
2653 False,
2657 False,
2654 _(b'restrict display to markers only relevant to REV'),
2658 _(b'restrict display to markers only relevant to REV'),
2655 ),
2659 ),
2656 (b'', b'index', False, _(b'display index of the marker')),
2660 (b'', b'index', False, _(b'display index of the marker')),
2657 (b'', b'delete', [], _(b'delete markers specified by indices')),
2661 (b'', b'delete', [], _(b'delete markers specified by indices')),
2658 ]
2662 ]
2659 + cmdutil.commitopts2
2663 + cmdutil.commitopts2
2660 + cmdutil.formatteropts,
2664 + cmdutil.formatteropts,
2661 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2665 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2662 )
2666 )
2663 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2667 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2664 """create arbitrary obsolete marker
2668 """create arbitrary obsolete marker
2665
2669
2666 With no arguments, displays the list of obsolescence markers."""
2670 With no arguments, displays the list of obsolescence markers."""
2667
2671
2668 opts = pycompat.byteskwargs(opts)
2672 opts = pycompat.byteskwargs(opts)
2669
2673
2670 def parsenodeid(s):
2674 def parsenodeid(s):
2671 try:
2675 try:
2672 # We do not use revsingle/revrange functions here to accept
2676 # We do not use revsingle/revrange functions here to accept
2673 # arbitrary node identifiers, possibly not present in the
2677 # arbitrary node identifiers, possibly not present in the
2674 # local repository.
2678 # local repository.
2675 n = bin(s)
2679 n = bin(s)
2676 if len(n) != repo.nodeconstants.nodelen:
2680 if len(n) != repo.nodeconstants.nodelen:
2677 raise ValueError
2681 raise ValueError
2678 return n
2682 return n
2679 except ValueError:
2683 except ValueError:
2680 raise error.InputError(
2684 raise error.InputError(
2681 b'changeset references must be full hexadecimal '
2685 b'changeset references must be full hexadecimal '
2682 b'node identifiers'
2686 b'node identifiers'
2683 )
2687 )
2684
2688
2685 if opts.get(b'delete'):
2689 if opts.get(b'delete'):
2686 indices = []
2690 indices = []
2687 for v in opts.get(b'delete'):
2691 for v in opts.get(b'delete'):
2688 try:
2692 try:
2689 indices.append(int(v))
2693 indices.append(int(v))
2690 except ValueError:
2694 except ValueError:
2691 raise error.InputError(
2695 raise error.InputError(
2692 _(b'invalid index value: %r') % v,
2696 _(b'invalid index value: %r') % v,
2693 hint=_(b'use integers for indices'),
2697 hint=_(b'use integers for indices'),
2694 )
2698 )
2695
2699
2696 if repo.currenttransaction():
2700 if repo.currenttransaction():
2697 raise error.Abort(
2701 raise error.Abort(
2698 _(b'cannot delete obsmarkers in the middle of transaction.')
2702 _(b'cannot delete obsmarkers in the middle of transaction.')
2699 )
2703 )
2700
2704
2701 with repo.lock():
2705 with repo.lock():
2702 n = repair.deleteobsmarkers(repo.obsstore, indices)
2706 n = repair.deleteobsmarkers(repo.obsstore, indices)
2703 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2707 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2704
2708
2705 return
2709 return
2706
2710
2707 if precursor is not None:
2711 if precursor is not None:
2708 if opts[b'rev']:
2712 if opts[b'rev']:
2709 raise error.InputError(
2713 raise error.InputError(
2710 b'cannot select revision when creating marker'
2714 b'cannot select revision when creating marker'
2711 )
2715 )
2712 metadata = {}
2716 metadata = {}
2713 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2717 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2714 succs = tuple(parsenodeid(succ) for succ in successors)
2718 succs = tuple(parsenodeid(succ) for succ in successors)
2715 l = repo.lock()
2719 l = repo.lock()
2716 try:
2720 try:
2717 tr = repo.transaction(b'debugobsolete')
2721 tr = repo.transaction(b'debugobsolete')
2718 try:
2722 try:
2719 date = opts.get(b'date')
2723 date = opts.get(b'date')
2720 if date:
2724 if date:
2721 date = dateutil.parsedate(date)
2725 date = dateutil.parsedate(date)
2722 else:
2726 else:
2723 date = None
2727 date = None
2724 prec = parsenodeid(precursor)
2728 prec = parsenodeid(precursor)
2725 parents = None
2729 parents = None
2726 if opts[b'record_parents']:
2730 if opts[b'record_parents']:
2727 if prec not in repo.unfiltered():
2731 if prec not in repo.unfiltered():
2728 raise error.Abort(
2732 raise error.Abort(
2729 b'cannot used --record-parents on '
2733 b'cannot used --record-parents on '
2730 b'unknown changesets'
2734 b'unknown changesets'
2731 )
2735 )
2732 parents = repo.unfiltered()[prec].parents()
2736 parents = repo.unfiltered()[prec].parents()
2733 parents = tuple(p.node() for p in parents)
2737 parents = tuple(p.node() for p in parents)
2734 repo.obsstore.create(
2738 repo.obsstore.create(
2735 tr,
2739 tr,
2736 prec,
2740 prec,
2737 succs,
2741 succs,
2738 opts[b'flags'],
2742 opts[b'flags'],
2739 parents=parents,
2743 parents=parents,
2740 date=date,
2744 date=date,
2741 metadata=metadata,
2745 metadata=metadata,
2742 ui=ui,
2746 ui=ui,
2743 )
2747 )
2744 tr.close()
2748 tr.close()
2745 except ValueError as exc:
2749 except ValueError as exc:
2746 raise error.Abort(
2750 raise error.Abort(
2747 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2751 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2748 )
2752 )
2749 finally:
2753 finally:
2750 tr.release()
2754 tr.release()
2751 finally:
2755 finally:
2752 l.release()
2756 l.release()
2753 else:
2757 else:
2754 if opts[b'rev']:
2758 if opts[b'rev']:
2755 revs = logcmdutil.revrange(repo, opts[b'rev'])
2759 revs = logcmdutil.revrange(repo, opts[b'rev'])
2756 nodes = [repo[r].node() for r in revs]
2760 nodes = [repo[r].node() for r in revs]
2757 markers = list(
2761 markers = list(
2758 obsutil.getmarkers(
2762 obsutil.getmarkers(
2759 repo, nodes=nodes, exclusive=opts[b'exclusive']
2763 repo, nodes=nodes, exclusive=opts[b'exclusive']
2760 )
2764 )
2761 )
2765 )
2762 markers.sort(key=lambda x: x._data)
2766 markers.sort(key=lambda x: x._data)
2763 else:
2767 else:
2764 markers = obsutil.getmarkers(repo)
2768 markers = obsutil.getmarkers(repo)
2765
2769
2766 markerstoiter = markers
2770 markerstoiter = markers
2767 isrelevant = lambda m: True
2771 isrelevant = lambda m: True
2768 if opts.get(b'rev') and opts.get(b'index'):
2772 if opts.get(b'rev') and opts.get(b'index'):
2769 markerstoiter = obsutil.getmarkers(repo)
2773 markerstoiter = obsutil.getmarkers(repo)
2770 markerset = set(markers)
2774 markerset = set(markers)
2771 isrelevant = lambda m: m in markerset
2775 isrelevant = lambda m: m in markerset
2772
2776
2773 fm = ui.formatter(b'debugobsolete', opts)
2777 fm = ui.formatter(b'debugobsolete', opts)
2774 for i, m in enumerate(markerstoiter):
2778 for i, m in enumerate(markerstoiter):
2775 if not isrelevant(m):
2779 if not isrelevant(m):
2776 # marker can be irrelevant when we're iterating over a set
2780 # marker can be irrelevant when we're iterating over a set
2777 # of markers (markerstoiter) which is bigger than the set
2781 # of markers (markerstoiter) which is bigger than the set
2778 # of markers we want to display (markers)
2782 # of markers we want to display (markers)
2779 # this can happen if both --index and --rev options are
2783 # this can happen if both --index and --rev options are
2780 # provided and thus we need to iterate over all of the markers
2784 # provided and thus we need to iterate over all of the markers
2781 # to get the correct indices, but only display the ones that
2785 # to get the correct indices, but only display the ones that
2782 # are relevant to --rev value
2786 # are relevant to --rev value
2783 continue
2787 continue
2784 fm.startitem()
2788 fm.startitem()
2785 ind = i if opts.get(b'index') else None
2789 ind = i if opts.get(b'index') else None
2786 cmdutil.showmarker(fm, m, index=ind)
2790 cmdutil.showmarker(fm, m, index=ind)
2787 fm.end()
2791 fm.end()
2788
2792
2789
2793
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the revision under inspection (working-directory parent
    # when -r is omitted), then print one "source -> destination" line
    # for every copy recorded against the first parent.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2802
2806
2803
2807
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Mirror of debugp1copies, but reporting copies recorded against the
    # second parent of the resolved revision.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p2copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2816
2820
2817
2821
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for one path
        # prefix, restricted to dirstate entries whose state character
        # is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # prefix resolves outside the repository: nothing to offer
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # strip the repo root so 'spec' compares against dirstate paths
        spec = spec[len(rootdir) :]
        # dirstate stores '/'-separated paths; on platforms with a
        # different separator, convert in both directions below
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # without --full, completion stops at the next path
                    # segment: report the containing directory instead
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted dirstate states from the filter flags; an empty
    # selection falls back to all of them ('nmar') at the call site.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # directories and files are printed together, sorted, one per line
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2886
2890
2887
2891
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then report every copy detected between
    # them, limited to files matching the walk patterns.
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    copymap = copies.pathcopies(ctx1, ctx2, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2901
2905
2902
2906
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        if islocal:
            ui.write(_(b'local: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'local: %s\n') % _(b'no'))
        if pushable:
            ui.write(_(b'pushable: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'pushable: %s\n') % _(b'no'))
    finally:
        # always release the peer connection, even on error
        peer.close()
2926
2930
2927
2931
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool is emulated through the ui.forcemerge override, the
        # same mechanism the real merge machinery uses
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # report (with -v) the inputs that can short-circuit tool
        # selection before merge-patterns matching happens
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress _picktool's own chatter unless --debug was given,
            # so the default output stays one "FILE = MERGETOOL" per file
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3012
3016
3013
3017
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # update mode: compare-and-set the key over the wire
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            return not result
    finally:
        target.close()
3049
3053
3050
3054
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions

    Prints both pvecs, their depths, and the delta, hamming distance
    and relation (``=``, ``>``, ``<`` or ``|``) between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: the branches above are expected to be
        # exhaustive, but the original code left 'rel' unbound (and so
        # crashed with UnboundLocalError) if none of them held.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3077
3081
3078
3082
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files in the manifest but unknown to the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # files only in the dirstate; keep those marked as added,
            # since --minimal must not touch pending adds
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3126
3130
3127
3131
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data limits
    # the scan to revlog data files.
    byteopts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byteopts.get(b"only_data"))
3144
3148
3145
3149
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # For every matched file in the target revision, report the filelog
    # rename metadata (source path and source file node), if any.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (rel, renamed[0], hex(renamed[1]))
            )
3165
3169
3166
3170
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3172
3176
3173
3177
3174 @command(
3178 @command(
3175 b'debugrevlog',
3179 b'debugrevlog',
3176 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3180 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3177 _(b'-c|-m|FILE'),
3181 _(b'-c|-m|FILE'),
3178 optionalrepo=True,
3182 optionalrepo=True,
3179 )
3183 )
3180 def debugrevlog(ui, repo, file_=None, **opts):
3184 def debugrevlog(ui, repo, file_=None, **opts):
3181 """show data and statistics about a revlog"""
3185 """show data and statistics about a revlog"""
3182 opts = pycompat.byteskwargs(opts)
3186 opts = pycompat.byteskwargs(opts)
3183 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3187 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3184
3188
3185 if opts.get(b"dump"):
3189 if opts.get(b"dump"):
3186 numrevs = len(r)
3190 numrevs = len(r)
3187 ui.write(
3191 ui.write(
3188 (
3192 (
3189 b"# rev p1rev p2rev start end deltastart base p1 p2"
3193 b"# rev p1rev p2rev start end deltastart base p1 p2"
3190 b" rawsize totalsize compression heads chainlen\n"
3194 b" rawsize totalsize compression heads chainlen\n"
3191 )
3195 )
3192 )
3196 )
3193 ts = 0
3197 ts = 0
3194 heads = set()
3198 heads = set()
3195
3199
3196 for rev in range(numrevs):
3200 for rev in range(numrevs):
3197 dbase = r.deltaparent(rev)
3201 dbase = r.deltaparent(rev)
3198 if dbase == -1:
3202 if dbase == -1:
3199 dbase = rev
3203 dbase = rev
3200 cbase = r.chainbase(rev)
3204 cbase = r.chainbase(rev)
3201 clen = r.chainlen(rev)
3205 clen = r.chainlen(rev)
3202 p1, p2 = r.parentrevs(rev)
3206 p1, p2 = r.parentrevs(rev)
3203 rs = r.rawsize(rev)
3207 rs = r.rawsize(rev)
3204 ts = ts + rs
3208 ts = ts + rs
3205 heads -= set(r.parentrevs(rev))
3209 heads -= set(r.parentrevs(rev))
3206 heads.add(rev)
3210 heads.add(rev)
3207 try:
3211 try:
3208 compression = ts / r.end(rev)
3212 compression = ts / r.end(rev)
3209 except ZeroDivisionError:
3213 except ZeroDivisionError:
3210 compression = 0
3214 compression = 0
3211 ui.write(
3215 ui.write(
3212 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3216 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3213 b"%11d %5d %8d\n"
3217 b"%11d %5d %8d\n"
3214 % (
3218 % (
3215 rev,
3219 rev,
3216 p1,
3220 p1,
3217 p2,
3221 p2,
3218 r.start(rev),
3222 r.start(rev),
3219 r.end(rev),
3223 r.end(rev),
3220 r.start(dbase),
3224 r.start(dbase),
3221 r.start(cbase),
3225 r.start(cbase),
3222 r.start(p1),
3226 r.start(p1),
3223 r.start(p2),
3227 r.start(p2),
3224 rs,
3228 rs,
3225 ts,
3229 ts,
3226 compression,
3230 compression,
3227 len(heads),
3231 len(heads),
3228 clen,
3232 clen,
3229 )
3233 )
3230 )
3234 )
3231 return 0
3235 return 0
3232
3236
3233 format = r._format_version
3237 format = r._format_version
3234 v = r._format_flags
3238 v = r._format_flags
3235 flags = []
3239 flags = []
3236 gdelta = False
3240 gdelta = False
3237 if v & revlog.FLAG_INLINE_DATA:
3241 if v & revlog.FLAG_INLINE_DATA:
3238 flags.append(b'inline')
3242 flags.append(b'inline')
3239 if v & revlog.FLAG_GENERALDELTA:
3243 if v & revlog.FLAG_GENERALDELTA:
3240 gdelta = True
3244 gdelta = True
3241 flags.append(b'generaldelta')
3245 flags.append(b'generaldelta')
3242 if not flags:
3246 if not flags:
3243 flags = [b'(none)']
3247 flags = [b'(none)']
3244
3248
3245 ### tracks merge vs single parent
3249 ### tracks merge vs single parent
3246 nummerges = 0
3250 nummerges = 0
3247
3251
3248 ### tracks ways the "delta" are build
3252 ### tracks ways the "delta" are build
3249 # nodelta
3253 # nodelta
3250 numempty = 0
3254 numempty = 0
3251 numemptytext = 0
3255 numemptytext = 0
3252 numemptydelta = 0
3256 numemptydelta = 0
3253 # full file content
3257 # full file content
3254 numfull = 0
3258 numfull = 0
3255 # intermediate snapshot against a prior snapshot
3259 # intermediate snapshot against a prior snapshot
3256 numsemi = 0
3260 numsemi = 0
3257 # snapshot count per depth
3261 # snapshot count per depth
3258 numsnapdepth = collections.defaultdict(lambda: 0)
3262 numsnapdepth = collections.defaultdict(lambda: 0)
3259 # delta against previous revision
3263 # delta against previous revision
3260 numprev = 0
3264 numprev = 0
3261 # delta against first or second parent (not prev)
3265 # delta against first or second parent (not prev)
3262 nump1 = 0
3266 nump1 = 0
3263 nump2 = 0
3267 nump2 = 0
3264 # delta against neither prev nor parents
3268 # delta against neither prev nor parents
3265 numother = 0
3269 numother = 0
3266 # delta against prev that are also first or second parent
3270 # delta against prev that are also first or second parent
3267 # (details of `numprev`)
3271 # (details of `numprev`)
3268 nump1prev = 0
3272 nump1prev = 0
3269 nump2prev = 0
3273 nump2prev = 0
3270
3274
3271 # data about delta chain of each revs
3275 # data about delta chain of each revs
3272 chainlengths = []
3276 chainlengths = []
3273 chainbases = []
3277 chainbases = []
3274 chainspans = []
3278 chainspans = []
3275
3279
3276 # data about each revision
3280 # data about each revision
3277 datasize = [None, 0, 0]
3281 datasize = [None, 0, 0]
3278 fullsize = [None, 0, 0]
3282 fullsize = [None, 0, 0]
3279 semisize = [None, 0, 0]
3283 semisize = [None, 0, 0]
3280 # snapshot count per depth
3284 # snapshot count per depth
3281 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3285 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3282 deltasize = [None, 0, 0]
3286 deltasize = [None, 0, 0]
3283 chunktypecounts = {}
3287 chunktypecounts = {}
3284 chunktypesizes = {}
3288 chunktypesizes = {}
3285
3289
3286 def addsize(size, l):
3290 def addsize(size, l):
3287 if l[0] is None or size < l[0]:
3291 if l[0] is None or size < l[0]:
3288 l[0] = size
3292 l[0] = size
3289 if size > l[1]:
3293 if size > l[1]:
3290 l[1] = size
3294 l[1] = size
3291 l[2] += size
3295 l[2] += size
3292
3296
3293 numrevs = len(r)
3297 numrevs = len(r)
3294 for rev in range(numrevs):
3298 for rev in range(numrevs):
3295 p1, p2 = r.parentrevs(rev)
3299 p1, p2 = r.parentrevs(rev)
3296 delta = r.deltaparent(rev)
3300 delta = r.deltaparent(rev)
3297 if format > 0:
3301 if format > 0:
3298 addsize(r.rawsize(rev), datasize)
3302 addsize(r.rawsize(rev), datasize)
3299 if p2 != nullrev:
3303 if p2 != nullrev:
3300 nummerges += 1
3304 nummerges += 1
3301 size = r.length(rev)
3305 size = r.length(rev)
3302 if delta == nullrev:
3306 if delta == nullrev:
3303 chainlengths.append(0)
3307 chainlengths.append(0)
3304 chainbases.append(r.start(rev))
3308 chainbases.append(r.start(rev))
3305 chainspans.append(size)
3309 chainspans.append(size)
3306 if size == 0:
3310 if size == 0:
3307 numempty += 1
3311 numempty += 1
3308 numemptytext += 1
3312 numemptytext += 1
3309 else:
3313 else:
3310 numfull += 1
3314 numfull += 1
3311 numsnapdepth[0] += 1
3315 numsnapdepth[0] += 1
3312 addsize(size, fullsize)
3316 addsize(size, fullsize)
3313 addsize(size, snapsizedepth[0])
3317 addsize(size, snapsizedepth[0])
3314 else:
3318 else:
3315 chainlengths.append(chainlengths[delta] + 1)
3319 chainlengths.append(chainlengths[delta] + 1)
3316 baseaddr = chainbases[delta]
3320 baseaddr = chainbases[delta]
3317 revaddr = r.start(rev)
3321 revaddr = r.start(rev)
3318 chainbases.append(baseaddr)
3322 chainbases.append(baseaddr)
3319 chainspans.append((revaddr - baseaddr) + size)
3323 chainspans.append((revaddr - baseaddr) + size)
3320 if size == 0:
3324 if size == 0:
3321 numempty += 1
3325 numempty += 1
3322 numemptydelta += 1
3326 numemptydelta += 1
3323 elif r.issnapshot(rev):
3327 elif r.issnapshot(rev):
3324 addsize(size, semisize)
3328 addsize(size, semisize)
3325 numsemi += 1
3329 numsemi += 1
3326 depth = r.snapshotdepth(rev)
3330 depth = r.snapshotdepth(rev)
3327 numsnapdepth[depth] += 1
3331 numsnapdepth[depth] += 1
3328 addsize(size, snapsizedepth[depth])
3332 addsize(size, snapsizedepth[depth])
3329 else:
3333 else:
3330 addsize(size, deltasize)
3334 addsize(size, deltasize)
3331 if delta == rev - 1:
3335 if delta == rev - 1:
3332 numprev += 1
3336 numprev += 1
3333 if delta == p1:
3337 if delta == p1:
3334 nump1prev += 1
3338 nump1prev += 1
3335 elif delta == p2:
3339 elif delta == p2:
3336 nump2prev += 1
3340 nump2prev += 1
3337 elif delta == p1:
3341 elif delta == p1:
3338 nump1 += 1
3342 nump1 += 1
3339 elif delta == p2:
3343 elif delta == p2:
3340 nump2 += 1
3344 nump2 += 1
3341 elif delta != nullrev:
3345 elif delta != nullrev:
3342 numother += 1
3346 numother += 1
3343
3347
3344 # Obtain data on the raw chunks in the revlog.
3348 # Obtain data on the raw chunks in the revlog.
3345 if util.safehasattr(r, b'_getsegmentforrevs'):
3349 if util.safehasattr(r, b'_getsegmentforrevs'):
3346 segment = r._getsegmentforrevs(rev, rev)[1]
3350 segment = r._getsegmentforrevs(rev, rev)[1]
3347 else:
3351 else:
3348 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3352 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3349 if segment:
3353 if segment:
3350 chunktype = bytes(segment[0:1])
3354 chunktype = bytes(segment[0:1])
3351 else:
3355 else:
3352 chunktype = b'empty'
3356 chunktype = b'empty'
3353
3357
3354 if chunktype not in chunktypecounts:
3358 if chunktype not in chunktypecounts:
3355 chunktypecounts[chunktype] = 0
3359 chunktypecounts[chunktype] = 0
3356 chunktypesizes[chunktype] = 0
3360 chunktypesizes[chunktype] = 0
3357
3361
3358 chunktypecounts[chunktype] += 1
3362 chunktypecounts[chunktype] += 1
3359 chunktypesizes[chunktype] += size
3363 chunktypesizes[chunktype] += size
3360
3364
3361 # Adjust size min value for empty cases
3365 # Adjust size min value for empty cases
3362 for size in (datasize, fullsize, semisize, deltasize):
3366 for size in (datasize, fullsize, semisize, deltasize):
3363 if size[0] is None:
3367 if size[0] is None:
3364 size[0] = 0
3368 size[0] = 0
3365
3369
3366 numdeltas = numrevs - numfull - numempty - numsemi
3370 numdeltas = numrevs - numfull - numempty - numsemi
3367 numoprev = numprev - nump1prev - nump2prev
3371 numoprev = numprev - nump1prev - nump2prev
3368 totalrawsize = datasize[2]
3372 totalrawsize = datasize[2]
3369 datasize[2] /= numrevs
3373 datasize[2] /= numrevs
3370 fulltotal = fullsize[2]
3374 fulltotal = fullsize[2]
3371 if numfull == 0:
3375 if numfull == 0:
3372 fullsize[2] = 0
3376 fullsize[2] = 0
3373 else:
3377 else:
3374 fullsize[2] /= numfull
3378 fullsize[2] /= numfull
3375 semitotal = semisize[2]
3379 semitotal = semisize[2]
3376 snaptotal = {}
3380 snaptotal = {}
3377 if numsemi > 0:
3381 if numsemi > 0:
3378 semisize[2] /= numsemi
3382 semisize[2] /= numsemi
3379 for depth in snapsizedepth:
3383 for depth in snapsizedepth:
3380 snaptotal[depth] = snapsizedepth[depth][2]
3384 snaptotal[depth] = snapsizedepth[depth][2]
3381 snapsizedepth[depth][2] /= numsnapdepth[depth]
3385 snapsizedepth[depth][2] /= numsnapdepth[depth]
3382
3386
3383 deltatotal = deltasize[2]
3387 deltatotal = deltasize[2]
3384 if numdeltas > 0:
3388 if numdeltas > 0:
3385 deltasize[2] /= numdeltas
3389 deltasize[2] /= numdeltas
3386 totalsize = fulltotal + semitotal + deltatotal
3390 totalsize = fulltotal + semitotal + deltatotal
3387 avgchainlen = sum(chainlengths) / numrevs
3391 avgchainlen = sum(chainlengths) / numrevs
3388 maxchainlen = max(chainlengths)
3392 maxchainlen = max(chainlengths)
3389 maxchainspan = max(chainspans)
3393 maxchainspan = max(chainspans)
3390 compratio = 1
3394 compratio = 1
3391 if totalsize:
3395 if totalsize:
3392 compratio = totalrawsize / totalsize
3396 compratio = totalrawsize / totalsize
3393
3397
3394 basedfmtstr = b'%%%dd\n'
3398 basedfmtstr = b'%%%dd\n'
3395 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3399 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3396
3400
3397 def dfmtstr(max):
3401 def dfmtstr(max):
3398 return basedfmtstr % len(str(max))
3402 return basedfmtstr % len(str(max))
3399
3403
3400 def pcfmtstr(max, padding=0):
3404 def pcfmtstr(max, padding=0):
3401 return basepcfmtstr % (len(str(max)), b' ' * padding)
3405 return basepcfmtstr % (len(str(max)), b' ' * padding)
3402
3406
3403 def pcfmt(value, total):
3407 def pcfmt(value, total):
3404 if total:
3408 if total:
3405 return (value, 100 * float(value) / total)
3409 return (value, 100 * float(value) / total)
3406 else:
3410 else:
3407 return value, 100.0
3411 return value, 100.0
3408
3412
3409 ui.writenoi18n(b'format : %d\n' % format)
3413 ui.writenoi18n(b'format : %d\n' % format)
3410 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3414 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3411
3415
3412 ui.write(b'\n')
3416 ui.write(b'\n')
3413 fmt = pcfmtstr(totalsize)
3417 fmt = pcfmtstr(totalsize)
3414 fmt2 = dfmtstr(totalsize)
3418 fmt2 = dfmtstr(totalsize)
3415 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3419 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3416 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3420 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3417 ui.writenoi18n(
3421 ui.writenoi18n(
3418 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3422 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3419 )
3423 )
3420 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3424 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3421 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3425 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3422 ui.writenoi18n(
3426 ui.writenoi18n(
3423 b' text : '
3427 b' text : '
3424 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3428 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3425 )
3429 )
3426 ui.writenoi18n(
3430 ui.writenoi18n(
3427 b' delta : '
3431 b' delta : '
3428 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3432 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3429 )
3433 )
3430 ui.writenoi18n(
3434 ui.writenoi18n(
3431 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3435 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3432 )
3436 )
3433 for depth in sorted(numsnapdepth):
3437 for depth in sorted(numsnapdepth):
3434 ui.write(
3438 ui.write(
3435 (b' lvl-%-3d : ' % depth)
3439 (b' lvl-%-3d : ' % depth)
3436 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3440 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3437 )
3441 )
3438 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3442 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3439 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3443 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3440 ui.writenoi18n(
3444 ui.writenoi18n(
3441 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3445 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3442 )
3446 )
3443 for depth in sorted(numsnapdepth):
3447 for depth in sorted(numsnapdepth):
3444 ui.write(
3448 ui.write(
3445 (b' lvl-%-3d : ' % depth)
3449 (b' lvl-%-3d : ' % depth)
3446 + fmt % pcfmt(snaptotal[depth], totalsize)
3450 + fmt % pcfmt(snaptotal[depth], totalsize)
3447 )
3451 )
3448 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3452 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3449
3453
3450 def fmtchunktype(chunktype):
3454 def fmtchunktype(chunktype):
3451 if chunktype == b'empty':
3455 if chunktype == b'empty':
3452 return b' %s : ' % chunktype
3456 return b' %s : ' % chunktype
3453 elif chunktype in pycompat.bytestr(string.ascii_letters):
3457 elif chunktype in pycompat.bytestr(string.ascii_letters):
3454 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3458 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3455 else:
3459 else:
3456 return b' 0x%s : ' % hex(chunktype)
3460 return b' 0x%s : ' % hex(chunktype)
3457
3461
3458 ui.write(b'\n')
3462 ui.write(b'\n')
3459 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3463 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3460 for chunktype in sorted(chunktypecounts):
3464 for chunktype in sorted(chunktypecounts):
3461 ui.write(fmtchunktype(chunktype))
3465 ui.write(fmtchunktype(chunktype))
3462 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3466 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3463 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3467 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3464 for chunktype in sorted(chunktypecounts):
3468 for chunktype in sorted(chunktypecounts):
3465 ui.write(fmtchunktype(chunktype))
3469 ui.write(fmtchunktype(chunktype))
3466 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3470 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3467
3471
3468 ui.write(b'\n')
3472 ui.write(b'\n')
3469 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3473 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3470 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3474 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3471 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3475 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3472 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3476 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3473 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3477 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3474
3478
3475 if format > 0:
3479 if format > 0:
3476 ui.write(b'\n')
3480 ui.write(b'\n')
3477 ui.writenoi18n(
3481 ui.writenoi18n(
3478 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3482 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3479 % tuple(datasize)
3483 % tuple(datasize)
3480 )
3484 )
3481 ui.writenoi18n(
3485 ui.writenoi18n(
3482 b'full revision size (min/max/avg) : %d / %d / %d\n'
3486 b'full revision size (min/max/avg) : %d / %d / %d\n'
3483 % tuple(fullsize)
3487 % tuple(fullsize)
3484 )
3488 )
3485 ui.writenoi18n(
3489 ui.writenoi18n(
3486 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3490 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3487 % tuple(semisize)
3491 % tuple(semisize)
3488 )
3492 )
3489 for depth in sorted(snapsizedepth):
3493 for depth in sorted(snapsizedepth):
3490 if depth == 0:
3494 if depth == 0:
3491 continue
3495 continue
3492 ui.writenoi18n(
3496 ui.writenoi18n(
3493 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3497 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3494 % ((depth,) + tuple(snapsizedepth[depth]))
3498 % ((depth,) + tuple(snapsizedepth[depth]))
3495 )
3499 )
3496 ui.writenoi18n(
3500 ui.writenoi18n(
3497 b'delta size (min/max/avg) : %d / %d / %d\n'
3501 b'delta size (min/max/avg) : %d / %d / %d\n'
3498 % tuple(deltasize)
3502 % tuple(deltasize)
3499 )
3503 )
3500
3504
3501 if numdeltas > 0:
3505 if numdeltas > 0:
3502 ui.write(b'\n')
3506 ui.write(b'\n')
3503 fmt = pcfmtstr(numdeltas)
3507 fmt = pcfmtstr(numdeltas)
3504 fmt2 = pcfmtstr(numdeltas, 4)
3508 fmt2 = pcfmtstr(numdeltas, 4)
3505 ui.writenoi18n(
3509 ui.writenoi18n(
3506 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3510 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3507 )
3511 )
3508 if numprev > 0:
3512 if numprev > 0:
3509 ui.writenoi18n(
3513 ui.writenoi18n(
3510 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3514 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3511 )
3515 )
3512 ui.writenoi18n(
3516 ui.writenoi18n(
3513 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3517 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3514 )
3518 )
3515 ui.writenoi18n(
3519 ui.writenoi18n(
3516 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3520 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3517 )
3521 )
3518 if gdelta:
3522 if gdelta:
3519 ui.writenoi18n(
3523 ui.writenoi18n(
3520 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3524 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3521 )
3525 )
3522 ui.writenoi18n(
3526 ui.writenoi18n(
3523 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3527 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3524 )
3528 )
3525 ui.writenoi18n(
3529 ui.writenoi18n(
3526 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3530 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3527 )
3531 )
3528
3532
3529
3533
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    index_format = opts.get(b'format', 0)
    if index_format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % index_format)

    # full-length hashes with --debug, abbreviated ones otherwise
    fmt_node = hex if ui.debugflag else short

    # Width of the node-id columns, derived from the first entry.  An empty
    # revlog falls back to a sane default of 12.
    idlen = 12
    first = next(iter(rlog), None)
    if first is not None:
        idlen = len(fmt_node(rlog.node(first)))

    # NOTE(review): the alignment spaces inside the header literals below
    # look like they may have been collapsed in transit -- verify the column
    # widths against the per-row format strings.
    if index_format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif index_format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for rev in rlog:
        node = rlog.node(rev)
        if index_format == 0:
            try:
                parents = rlog.parents(node)
            except Exception:
                # damaged entries still get a line, with null parents
                parents = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        rev,
                        rlog.start(rev),
                        rlog.length(rev),
                        rlog.linkrev(rev),
                        fmt_node(node),
                        fmt_node(parents[0]),
                        fmt_node(parents[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        rev,
                        rlog.linkrev(rev),
                        fmt_node(node),
                        fmt_node(parents[0]),
                        fmt_node(parents[1]),
                    )
                )
        elif index_format == 1:
            prevs = rlog.parentrevs(rev)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rlog.flags(rev),
                        rlog.start(rev),
                        rlog.length(rev),
                        rlog.rawsize(rev),
                        rlog.linkrev(rev),
                        prevs[0],
                        prevs[1],
                        fmt_node(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rlog.flags(rev),
                        rlog.rawsize(rev),
                        rlog.linkrev(rev),
                        prevs[0],
                        prevs[1],
                        fmt_node(node),
                    )
                )
3643
3647
3644
3648
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parsing pipeline: each stage transforms the tree produced by the
    # previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {stagename for stagename, _fn in stages}

    # Which stages get their tree printed: always, or only when the tree
    # changed compared to the previously printed one.
    always_show = set()
    show_if_changed = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        always_show.add(b'parsed')
        show_if_changed.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            always_show.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        always_show.update(stagenames)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        always_show.update(opts[b'show_stage'])

    # internal set representation is dumped on explicit -s, or implicitly
    # with --verbose when -s was not given at all
    want_set = opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose)

    trees = {}
    last_printed = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for stagename, transform in stages:
        trees[stagename] = tree = transform(tree)
        show = stagename in always_show or (
            stagename in show_if_changed and tree != last_printed
        )
        if show:
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            last_printed = tree

    if opts[b'verify_optimized']:
        revs_a = revset.makematcher(trees[b'analyzed'])(repo)
        revs_b = revset.makematcher(trees[b'optimized'])(repo)
        if want_set:
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(revs_a), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(revs_b), b"\n"
            )
        revs_a = list(revs_a)
        revs_b = list(revs_b)
        if revs_a == revs_b:
            return 0
        # print a unified-diff-style comparison of the two rev lists
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        matcher = difflib.SequenceMatcher(None, revs_a, revs_b)
        for tag, alo, ahi, blo, bhi in matcher.get_opcodes():
            if tag in ('delete', 'replace'):
                for r in revs_a[alo:ahi]:
                    ui.write(b'-%d\n' % r, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for r in revs_b[blo:bhi]:
                    ui.write(b'+%d\n' % r, label=b'diff.inserted')
            if tag == 'equal':
                for r in revs_a[alo:ahi]:
                    ui.write(b' %d\n' % r)
        return 1

    matchfn = revset.makematcher(tree)
    revs = matchfn(repo)
    if want_set:
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for r in revs:
        ui.write(b"%d\n" % r)
3776
3780
3777
3781
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # only the SSH-over-stdio transport is implemented here
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # the two logging destinations are mutually exclusive
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3826
3830
3827
3831
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    who deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to binary node ids; rev2 defaults to the null
    # revision when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent pointers are rewritten, under the wlock; the
    # working directory contents are deliberately left untouched.
    with repo.wlock():
        repo.setparents(node1, node2)
3855
3859
3856
3860
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not a
    # file; shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUGFIX: error messages previously named b'debugdata' (copy-paste
            # from that command); report this command's own name instead.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Sidedata lives on the underlying revlog; unwrap storage wrappers that
    # expose one via ``_revlog``.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for deterministic output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            # Keys are printed in octal, matching the on-disk encoding.
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3883
3887
3884
3888
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # BUGFIX: ssl.wrap_socket() was deprecated in Python 3.7 and removed in
    # 3.12.  Build an explicit context instead.  Verification is deliberately
    # disabled: we only need the peer's raw certificate bytes so Windows can
    # evaluate/repair the chain, not a trust decision from Python.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True: DER bytes, as required by the win32 helper.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True attempts the repair.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3956
3960
3957
3961
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize bundle/force so getremotechanges() below uses the backup
    # file itself rather than any user-provided bundle options.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from one bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision missing locally; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the noisy incoming machinery while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # Legacy (non-bundle2) bundle format.
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # First bundle containing the node wins; stop here.
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # either the path (--verbose) or a one-line-per-cset template.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # Always drop the temporary bundle repo set up by
            # getremotechanges().
            cleanupfn()
4098
4102
4099
4103
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepo state (path, source, revision) recorded at REV,
    # sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path in sorted(ctx.substate):
        state = ctx.substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4111
4115
4112
4116
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when this debug command actually runs.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
4128
4132
4129
4133
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: revisions render via bytes(ctx), nodes via the
    # short hex form.
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each successors set is printed indented on its own line; an
            # empty set (pruned changeset) still yields a blank line.
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4184
4188
4185
4189
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: report what is cached, never recompute.
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            shown = b'missing'
        elif not fnode:
            shown = b'invalid'
        else:
            shown = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                shown += b' (unknown node)'

        ui.write(b'%d %s %s\n' % (rev, hex(node), shown))
4204
4208
4205
4209
4206 @command(
4210 @command(
4207 b'debugtemplate',
4211 b'debugtemplate',
4208 [
4212 [
4209 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4213 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4210 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4214 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4211 ],
4215 ],
4212 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4216 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4213 optionalrepo=True,
4217 optionalrepo=True,
4214 )
4218 )
4215 def debugtemplate(ui, repo, tmpl, **opts):
4219 def debugtemplate(ui, repo, tmpl, **opts):
4216 """parse and apply a template
4220 """parse and apply a template
4217
4221
4218 If -r/--rev is given, the template is processed as a log template and
4222 If -r/--rev is given, the template is processed as a log template and
4219 applied to the given changesets. Otherwise, it is processed as a generic
4223 applied to the given changesets. Otherwise, it is processed as a generic
4220 template.
4224 template.
4221
4225
4222 Use --verbose to print the parsed tree.
4226 Use --verbose to print the parsed tree.
4223 """
4227 """
4224 revs = None
4228 revs = None
4225 if opts['rev']:
4229 if opts['rev']:
4226 if repo is None:
4230 if repo is None:
4227 raise error.RepoError(
4231 raise error.RepoError(
4228 _(b'there is no Mercurial repository here (.hg not found)')
4232 _(b'there is no Mercurial repository here (.hg not found)')
4229 )
4233 )
4230 revs = logcmdutil.revrange(repo, opts['rev'])
4234 revs = logcmdutil.revrange(repo, opts['rev'])
4231
4235
4232 props = {}
4236 props = {}
4233 for d in opts['define']:
4237 for d in opts['define']:
4234 try:
4238 try:
4235 k, v = (e.strip() for e in d.split(b'=', 1))
4239 k, v = (e.strip() for e in d.split(b'=', 1))
4236 if not k or k == b'ui':
4240 if not k or k == b'ui':
4237 raise ValueError
4241 raise ValueError
4238 props[k] = v
4242 props[k] = v
4239 except ValueError:
4243 except ValueError:
4240 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4244 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4241
4245
4242 if ui.verbose:
4246 if ui.verbose:
4243 aliases = ui.configitems(b'templatealias')
4247 aliases = ui.configitems(b'templatealias')
4244 tree = templater.parse(tmpl)
4248 tree = templater.parse(tmpl)
4245 ui.note(templater.prettyformat(tree), b'\n')
4249 ui.note(templater.prettyformat(tree), b'\n')
4246 newtree = templater.expandaliases(tree, aliases)
4250 newtree = templater.expandaliases(tree, aliases)
4247 if newtree != tree:
4251 if newtree != tree:
4248 ui.notenoi18n(
4252 ui.notenoi18n(
4249 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4253 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4250 )
4254 )
4251
4255
4252 if revs is None:
4256 if revs is None:
4253 tres = formatter.templateresources(ui, repo)
4257 tres = formatter.templateresources(ui, repo)
4254 t = formatter.maketemplater(ui, tmpl, resources=tres)
4258 t = formatter.maketemplater(ui, tmpl, resources=tres)
4255 if ui.verbose:
4259 if ui.verbose:
4256 kwds, funcs = t.symbolsuseddefault()
4260 kwds, funcs = t.symbolsuseddefault()
4257 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4261 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4258 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4262 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4259 ui.write(t.renderdefault(props))
4263 ui.write(t.renderdefault(props))
4260 else:
4264 else:
4261 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4265 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4262 if ui.verbose:
4266 if ui.verbose:
4263 kwds, funcs = displayer.t.symbolsuseddefault()
4267 kwds, funcs = displayer.t.symbolsuseddefault()
4264 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4268 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4265 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4269 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4266 for r in revs:
4270 for r in revs:
4267 displayer.show(repo[r], **pycompat.strkwargs(props))
4271 displayer.show(repo[r], **pycompat.strkwargs(props))
4268 displayer.close()
4272 displayer.close()
4269
4273
4270
4274
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may return None (e.g. non-interactive default); make
    # that visible in the echoed output.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4285
4289
4286
4290
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() returned so tests can observe it.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4299
4303
4300
4304
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while rebuilding caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4306
4310
4307
4311
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin wrapper: all the real work (analysis and optional execution)
    # happens in the upgrade module.  --optimize may repeat, hence set().
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4357
4361
4358
4362
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Display paths with '/' separators when ui.slash is set on platforms
    # whose native separator differs (e.g. Windows).
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:

        def displaypath(fn):
            return fn

    # Size the two path columns to the longest absolute and relative paths.
    # (Renamed from `abs` to avoid shadowing the builtin; generators avoid
    # building throwaway lists inside max().)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            displaypath(repo.pathto(fname)),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4385
4389
4386
4390
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergence is involved, prefix the reason with the list of
        # divergent nodes (hash and phase), followed by a separating space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            descriptions = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(descriptions) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4404
4408
4405
4409
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing over the wire protocol against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The generic remote options are connection settings, not wire
        # arguments; strip them before forwarding.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Only forward options that were actually set to a truthy value.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4436
4440
4437
4441
4438 def _parsewirelangblocks(fh):
4442 def _parsewirelangblocks(fh):
4439 activeaction = None
4443 activeaction = None
4440 blocklines = []
4444 blocklines = []
4441 lastindent = 0
4445 lastindent = 0
4442
4446
4443 for line in fh:
4447 for line in fh:
4444 line = line.rstrip()
4448 line = line.rstrip()
4445 if not line:
4449 if not line:
4446 continue
4450 continue
4447
4451
4448 if line.startswith(b'#'):
4452 if line.startswith(b'#'):
4449 continue
4453 continue
4450
4454
4451 if not line.startswith(b' '):
4455 if not line.startswith(b' '):
4452 # New block. Flush previous one.
4456 # New block. Flush previous one.
4453 if activeaction:
4457 if activeaction:
4454 yield activeaction, blocklines
4458 yield activeaction, blocklines
4455
4459
4456 activeaction = line
4460 activeaction = line
4457 blocklines = []
4461 blocklines = []
4458 lastindent = 0
4462 lastindent = 0
4459 continue
4463 continue
4460
4464
4461 # Else we start with an indent.
4465 # Else we start with an indent.
4462
4466
4463 if not activeaction:
4467 if not activeaction:
4464 raise error.Abort(_(b'indented line outside of block'))
4468 raise error.Abort(_(b'indented line outside of block'))
4465
4469
4466 indent = len(line) - len(line.lstrip())
4470 indent = len(line) - len(line.lstrip())
4467
4471
4468 # If this line is indented more than the last line, concatenate it.
4472 # If this line is indented more than the last line, concatenate it.
4469 if indent > lastindent and blocklines:
4473 if indent > lastindent and blocklines:
4470 blocklines[-1] += line.lstrip()
4474 blocklines[-1] += line.lstrip()
4471 else:
4475 else:
4472 blocklines.append(line)
4476 blocklines.append(line)
4473 lastindent = indent
4477 lastindent = indent
4474
4478
4475 # Flush last block.
4479 # Flush last block.
4476 if activeaction:
4480 if activeaction:
4477 yield activeaction, blocklines
4481 yield activeaction, blocklines
4478
4482
4479
4483
4480 @command(
4484 @command(
4481 b'debugwireproto',
4485 b'debugwireproto',
4482 [
4486 [
4483 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4487 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4484 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4488 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4485 (
4489 (
4486 b'',
4490 b'',
4487 b'noreadstderr',
4491 b'noreadstderr',
4488 False,
4492 False,
4489 _(b'do not read from stderr of the remote'),
4493 _(b'do not read from stderr of the remote'),
4490 ),
4494 ),
4491 (
4495 (
4492 b'',
4496 b'',
4493 b'nologhandshake',
4497 b'nologhandshake',
4494 False,
4498 False,
4495 _(b'do not log I/O related to the peer handshake'),
4499 _(b'do not log I/O related to the peer handshake'),
4496 ),
4500 ),
4497 ]
4501 ]
4498 + cmdutil.remoteopts,
4502 + cmdutil.remoteopts,
4499 _(b'[PATH]'),
4503 _(b'[PATH]'),
4500 optionalrepo=True,
4504 optionalrepo=True,
4501 )
4505 )
4502 def debugwireproto(ui, repo, path=None, **opts):
4506 def debugwireproto(ui, repo, path=None, **opts):
4503 """send wire protocol commands to a server
4507 """send wire protocol commands to a server
4504
4508
4505 This command can be used to issue wire protocol commands to remote
4509 This command can be used to issue wire protocol commands to remote
4506 peers and to debug the raw data being exchanged.
4510 peers and to debug the raw data being exchanged.
4507
4511
4508 ``--localssh`` will start an SSH server against the current repository
4512 ``--localssh`` will start an SSH server against the current repository
4509 and connect to that. By default, the connection will perform a handshake
4513 and connect to that. By default, the connection will perform a handshake
4510 and establish an appropriate peer instance.
4514 and establish an appropriate peer instance.
4511
4515
4512 ``--peer`` can be used to bypass the handshake protocol and construct a
4516 ``--peer`` can be used to bypass the handshake protocol and construct a
4513 peer instance using the specified class type. Valid values are ``raw``,
4517 peer instance using the specified class type. Valid values are ``raw``,
4514 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4518 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4515 don't support higher-level command actions.
4519 don't support higher-level command actions.
4516
4520
4517 ``--noreadstderr`` can be used to disable automatic reading from stderr
4521 ``--noreadstderr`` can be used to disable automatic reading from stderr
4518 of the peer (for SSH connections only). Disabling automatic reading of
4522 of the peer (for SSH connections only). Disabling automatic reading of
4519 stderr is useful for making output more deterministic.
4523 stderr is useful for making output more deterministic.
4520
4524
4521 Commands are issued via a mini language which is specified via stdin.
4525 Commands are issued via a mini language which is specified via stdin.
4522 The language consists of individual actions to perform. An action is
4526 The language consists of individual actions to perform. An action is
4523 defined by a block. A block is defined as a line with no leading
4527 defined by a block. A block is defined as a line with no leading
4524 space followed by 0 or more lines with leading space. Blocks are
4528 space followed by 0 or more lines with leading space. Blocks are
4525 effectively a high-level command with additional metadata.
4529 effectively a high-level command with additional metadata.
4526
4530
4527 Lines beginning with ``#`` are ignored.
4531 Lines beginning with ``#`` are ignored.
4528
4532
4529 The following sections denote available actions.
4533 The following sections denote available actions.
4530
4534
4531 raw
4535 raw
4532 ---
4536 ---
4533
4537
4534 Send raw data to the server.
4538 Send raw data to the server.
4535
4539
4536 The block payload contains the raw data to send as one atomic send
4540 The block payload contains the raw data to send as one atomic send
4537 operation. The data may not actually be delivered in a single system
4541 operation. The data may not actually be delivered in a single system
4538 call: it depends on the abilities of the transport being used.
4542 call: it depends on the abilities of the transport being used.
4539
4543
4540 Each line in the block is de-indented and concatenated. Then, that
4544 Each line in the block is de-indented and concatenated. Then, that
4541 value is evaluated as a Python b'' literal. This allows the use of
4545 value is evaluated as a Python b'' literal. This allows the use of
4542 backslash escaping, etc.
4546 backslash escaping, etc.
4543
4547
4544 raw+
4548 raw+
4545 ----
4549 ----
4546
4550
4547 Behaves like ``raw`` except flushes output afterwards.
4551 Behaves like ``raw`` except flushes output afterwards.
4548
4552
4549 command <X>
4553 command <X>
4550 -----------
4554 -----------
4551
4555
4552 Send a request to run a named command, whose name follows the ``command``
4556 Send a request to run a named command, whose name follows the ``command``
4553 string.
4557 string.
4554
4558
4555 Arguments to the command are defined as lines in this block. The format of
4559 Arguments to the command are defined as lines in this block. The format of
4556 each line is ``<key> <value>``. e.g.::
4560 each line is ``<key> <value>``. e.g.::
4557
4561
4558 command listkeys
4562 command listkeys
4559 namespace bookmarks
4563 namespace bookmarks
4560
4564
4561 If the value begins with ``eval:``, it will be interpreted as a Python
4565 If the value begins with ``eval:``, it will be interpreted as a Python
4562 literal expression. Otherwise values are interpreted as Python b'' literals.
4566 literal expression. Otherwise values are interpreted as Python b'' literals.
4563 This allows sending complex types and encoding special byte sequences via
4567 This allows sending complex types and encoding special byte sequences via
4564 backslash escaping.
4568 backslash escaping.
4565
4569
4566 The following arguments have special meaning:
4570 The following arguments have special meaning:
4567
4571
4568 ``PUSHFILE``
4572 ``PUSHFILE``
4569 When defined, the *push* mechanism of the peer will be used instead
4573 When defined, the *push* mechanism of the peer will be used instead
4570 of the static request-response mechanism and the content of the
4574 of the static request-response mechanism and the content of the
4571 file specified in the value of this argument will be sent as the
4575 file specified in the value of this argument will be sent as the
4572 command payload.
4576 command payload.
4573
4577
4574 This can be used to submit a local bundle file to the remote.
4578 This can be used to submit a local bundle file to the remote.
4575
4579
4576 batchbegin
4580 batchbegin
4577 ----------
4581 ----------
4578
4582
4579 Instruct the peer to begin a batched send.
4583 Instruct the peer to begin a batched send.
4580
4584
4581 All ``command`` blocks are queued for execution until the next
4585 All ``command`` blocks are queued for execution until the next
4582 ``batchsubmit`` block.
4586 ``batchsubmit`` block.
4583
4587
4584 batchsubmit
4588 batchsubmit
4585 -----------
4589 -----------
4586
4590
4587 Submit previously queued ``command`` blocks as a batch request.
4591 Submit previously queued ``command`` blocks as a batch request.
4588
4592
4589 This action MUST be paired with a ``batchbegin`` action.
4593 This action MUST be paired with a ``batchbegin`` action.
4590
4594
4591 httprequest <method> <path>
4595 httprequest <method> <path>
4592 ---------------------------
4596 ---------------------------
4593
4597
4594 (HTTP peer only)
4598 (HTTP peer only)
4595
4599
4596 Send an HTTP request to the peer.
4600 Send an HTTP request to the peer.
4597
4601
4598 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4602 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4599
4603
4600 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4604 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4601 headers to add to the request. e.g. ``Accept: foo``.
4605 headers to add to the request. e.g. ``Accept: foo``.
4602
4606
4603 The following arguments are special:
4607 The following arguments are special:
4604
4608
4605 ``BODYFILE``
4609 ``BODYFILE``
4606 The content of the file defined as the value to this argument will be
4610 The content of the file defined as the value to this argument will be
4607 transferred verbatim as the HTTP request body.
4611 transferred verbatim as the HTTP request body.
4608
4612
4609 ``frame <type> <flags> <payload>``
4613 ``frame <type> <flags> <payload>``
4610 Send a unified protocol frame as part of the request body.
4614 Send a unified protocol frame as part of the request body.
4611
4615
4612 All frames will be collected and sent as the body to the HTTP
4616 All frames will be collected and sent as the body to the HTTP
4613 request.
4617 request.
4614
4618
4615 close
4619 close
4616 -----
4620 -----
4617
4621
4618 Close the connection to the server.
4622 Close the connection to the server.
4619
4623
4620 flush
4624 flush
4621 -----
4625 -----
4622
4626
4623 Flush data written to the server.
4627 Flush data written to the server.
4624
4628
4625 readavailable
4629 readavailable
4626 -------------
4630 -------------
4627
4631
4628 Close the write end of the connection and read all available data from
4632 Close the write end of the connection and read all available data from
4629 the server.
4633 the server.
4630
4634
4631 If the connection to the server encompasses multiple pipes, we poll both
4635 If the connection to the server encompasses multiple pipes, we poll both
4632 pipes and read available data.
4636 pipes and read available data.
4633
4637
4634 readline
4638 readline
4635 --------
4639 --------
4636
4640
4637 Read a line of output from the server. If there are multiple output
4641 Read a line of output from the server. If there are multiple output
4638 pipes, reads only the main pipe.
4642 pipes, reads only the main pipe.
4639
4643
4640 ereadline
4644 ereadline
4641 ---------
4645 ---------
4642
4646
4643 Like ``readline``, but read from the stderr pipe, if available.
4647 Like ``readline``, but read from the stderr pipe, if available.
4644
4648
4645 read <X>
4649 read <X>
4646 --------
4650 --------
4647
4651
4648 ``read()`` N bytes from the server's main output pipe.
4652 ``read()`` N bytes from the server's main output pipe.
4649
4653
4650 eread <X>
4654 eread <X>
4651 ---------
4655 ---------
4652
4656
4653 ``read()`` N bytes from the server's stderr pipe, if available.
4657 ``read()`` N bytes from the server's stderr pipe, if available.
4654
4658
4655 Specifying Unified Frame-Based Protocol Frames
4659 Specifying Unified Frame-Based Protocol Frames
4656 ----------------------------------------------
4660 ----------------------------------------------
4657
4661
4658 It is possible to emit a *Unified Frame-Based Protocol* by using special
4662 It is possible to emit a *Unified Frame-Based Protocol* by using special
4659 syntax.
4663 syntax.
4660
4664
4661 A frame is composed as a type, flags, and payload. These can be parsed
4665 A frame is composed as a type, flags, and payload. These can be parsed
4662 from a string of the form:
4666 from a string of the form:
4663
4667
4664 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4668 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4665
4669
4666 ``request-id`` and ``stream-id`` are integers defining the request and
4670 ``request-id`` and ``stream-id`` are integers defining the request and
4667 stream identifiers.
4671 stream identifiers.
4668
4672
4669 ``type`` can be an integer value for the frame type or the string name
4673 ``type`` can be an integer value for the frame type or the string name
4670 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4674 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4671 ``command-name``.
4675 ``command-name``.
4672
4676
4673 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4677 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4674 components. Each component (and there can be just one) can be an integer
4678 components. Each component (and there can be just one) can be an integer
4675 or a flag name for stream flags or frame flags, respectively. Values are
4679 or a flag name for stream flags or frame flags, respectively. Values are
4676 resolved to integers and then bitwise OR'd together.
4680 resolved to integers and then bitwise OR'd together.
4677
4681
4678 ``payload`` represents the raw frame payload. If it begins with
4682 ``payload`` represents the raw frame payload. If it begins with
4679 ``cbor:``, the following string is evaluated as Python code and the
4683 ``cbor:``, the following string is evaluated as Python code and the
4680 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4684 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4681 as a Python byte string literal.
4685 as a Python byte string literal.
4682 """
4686 """
4683 opts = pycompat.byteskwargs(opts)
4687 opts = pycompat.byteskwargs(opts)
4684
4688
4685 if opts[b'localssh'] and not repo:
4689 if opts[b'localssh'] and not repo:
4686 raise error.Abort(_(b'--localssh requires a repository'))
4690 raise error.Abort(_(b'--localssh requires a repository'))
4687
4691
4688 if opts[b'peer'] and opts[b'peer'] not in (
4692 if opts[b'peer'] and opts[b'peer'] not in (
4689 b'raw',
4693 b'raw',
4690 b'ssh1',
4694 b'ssh1',
4691 ):
4695 ):
4692 raise error.Abort(
4696 raise error.Abort(
4693 _(b'invalid value for --peer'),
4697 _(b'invalid value for --peer'),
4694 hint=_(b'valid values are "raw" and "ssh1"'),
4698 hint=_(b'valid values are "raw" and "ssh1"'),
4695 )
4699 )
4696
4700
4697 if path and opts[b'localssh']:
4701 if path and opts[b'localssh']:
4698 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4702 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4699
4703
4700 if ui.interactive():
4704 if ui.interactive():
4701 ui.write(_(b'(waiting for commands on stdin)\n'))
4705 ui.write(_(b'(waiting for commands on stdin)\n'))
4702
4706
4703 blocks = list(_parsewirelangblocks(ui.fin))
4707 blocks = list(_parsewirelangblocks(ui.fin))
4704
4708
4705 proc = None
4709 proc = None
4706 stdin = None
4710 stdin = None
4707 stdout = None
4711 stdout = None
4708 stderr = None
4712 stderr = None
4709 opener = None
4713 opener = None
4710
4714
4711 if opts[b'localssh']:
4715 if opts[b'localssh']:
4712 # We start the SSH server in its own process so there is process
4716 # We start the SSH server in its own process so there is process
4713 # separation. This prevents a whole class of potential bugs around
4717 # separation. This prevents a whole class of potential bugs around
4714 # shared state from interfering with server operation.
4718 # shared state from interfering with server operation.
4715 args = procutil.hgcmd() + [
4719 args = procutil.hgcmd() + [
4716 b'-R',
4720 b'-R',
4717 repo.root,
4721 repo.root,
4718 b'debugserve',
4722 b'debugserve',
4719 b'--sshstdio',
4723 b'--sshstdio',
4720 ]
4724 ]
4721 proc = subprocess.Popen(
4725 proc = subprocess.Popen(
4722 pycompat.rapply(procutil.tonativestr, args),
4726 pycompat.rapply(procutil.tonativestr, args),
4723 stdin=subprocess.PIPE,
4727 stdin=subprocess.PIPE,
4724 stdout=subprocess.PIPE,
4728 stdout=subprocess.PIPE,
4725 stderr=subprocess.PIPE,
4729 stderr=subprocess.PIPE,
4726 bufsize=0,
4730 bufsize=0,
4727 )
4731 )
4728
4732
4729 stdin = proc.stdin
4733 stdin = proc.stdin
4730 stdout = proc.stdout
4734 stdout = proc.stdout
4731 stderr = proc.stderr
4735 stderr = proc.stderr
4732
4736
4733 # We turn the pipes into observers so we can log I/O.
4737 # We turn the pipes into observers so we can log I/O.
4734 if ui.verbose or opts[b'peer'] == b'raw':
4738 if ui.verbose or opts[b'peer'] == b'raw':
4735 stdin = util.makeloggingfileobject(
4739 stdin = util.makeloggingfileobject(
4736 ui, proc.stdin, b'i', logdata=True
4740 ui, proc.stdin, b'i', logdata=True
4737 )
4741 )
4738 stdout = util.makeloggingfileobject(
4742 stdout = util.makeloggingfileobject(
4739 ui, proc.stdout, b'o', logdata=True
4743 ui, proc.stdout, b'o', logdata=True
4740 )
4744 )
4741 stderr = util.makeloggingfileobject(
4745 stderr = util.makeloggingfileobject(
4742 ui, proc.stderr, b'e', logdata=True
4746 ui, proc.stderr, b'e', logdata=True
4743 )
4747 )
4744
4748
4745 # --localssh also implies the peer connection settings.
4749 # --localssh also implies the peer connection settings.
4746
4750
4747 url = b'ssh://localserver'
4751 url = b'ssh://localserver'
4748 autoreadstderr = not opts[b'noreadstderr']
4752 autoreadstderr = not opts[b'noreadstderr']
4749
4753
4750 if opts[b'peer'] == b'ssh1':
4754 if opts[b'peer'] == b'ssh1':
4751 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4755 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4752 peer = sshpeer.sshv1peer(
4756 peer = sshpeer.sshv1peer(
4753 ui,
4757 ui,
4754 url,
4758 url,
4755 proc,
4759 proc,
4756 stdin,
4760 stdin,
4757 stdout,
4761 stdout,
4758 stderr,
4762 stderr,
4759 None,
4763 None,
4760 autoreadstderr=autoreadstderr,
4764 autoreadstderr=autoreadstderr,
4761 )
4765 )
4762 elif opts[b'peer'] == b'raw':
4766 elif opts[b'peer'] == b'raw':
4763 ui.write(_(b'using raw connection to peer\n'))
4767 ui.write(_(b'using raw connection to peer\n'))
4764 peer = None
4768 peer = None
4765 else:
4769 else:
4766 ui.write(_(b'creating ssh peer from handshake results\n'))
4770 ui.write(_(b'creating ssh peer from handshake results\n'))
4767 peer = sshpeer.makepeer(
4771 peer = sshpeer.makepeer(
4768 ui,
4772 ui,
4769 url,
4773 url,
4770 proc,
4774 proc,
4771 stdin,
4775 stdin,
4772 stdout,
4776 stdout,
4773 stderr,
4777 stderr,
4774 autoreadstderr=autoreadstderr,
4778 autoreadstderr=autoreadstderr,
4775 )
4779 )
4776
4780
4777 elif path:
4781 elif path:
4778 # We bypass hg.peer() so we can proxy the sockets.
4782 # We bypass hg.peer() so we can proxy the sockets.
4779 # TODO consider not doing this because we skip
4783 # TODO consider not doing this because we skip
4780 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4784 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4781 u = urlutil.url(path)
4785 u = urlutil.url(path)
4782 if u.scheme != b'http':
4786 if u.scheme != b'http':
4783 raise error.Abort(_(b'only http:// paths are currently supported'))
4787 raise error.Abort(_(b'only http:// paths are currently supported'))
4784
4788
4785 url, authinfo = u.authinfo()
4789 url, authinfo = u.authinfo()
4786 openerargs = {
4790 openerargs = {
4787 'useragent': b'Mercurial debugwireproto',
4791 'useragent': b'Mercurial debugwireproto',
4788 }
4792 }
4789
4793
4790 # Turn pipes/sockets into observers so we can log I/O.
4794 # Turn pipes/sockets into observers so we can log I/O.
4791 if ui.verbose:
4795 if ui.verbose:
4792 openerargs.update(
4796 openerargs.update(
4793 {
4797 {
4794 'loggingfh': ui,
4798 'loggingfh': ui,
4795 'loggingname': b's',
4799 'loggingname': b's',
4796 'loggingopts': {
4800 'loggingopts': {
4797 'logdata': True,
4801 'logdata': True,
4798 'logdataapis': False,
4802 'logdataapis': False,
4799 },
4803 },
4800 }
4804 }
4801 )
4805 )
4802
4806
4803 if ui.debugflag:
4807 if ui.debugflag:
4804 openerargs['loggingopts']['logdataapis'] = True
4808 openerargs['loggingopts']['logdataapis'] = True
4805
4809
4806 # Don't send default headers when in raw mode. This allows us to
4810 # Don't send default headers when in raw mode. This allows us to
4807 # bypass most of the behavior of our URL handling code so we can
4811 # bypass most of the behavior of our URL handling code so we can
4808 # have near complete control over what's sent on the wire.
4812 # have near complete control over what's sent on the wire.
4809 if opts[b'peer'] == b'raw':
4813 if opts[b'peer'] == b'raw':
4810 openerargs['sendaccept'] = False
4814 openerargs['sendaccept'] = False
4811
4815
4812 opener = urlmod.opener(ui, authinfo, **openerargs)
4816 opener = urlmod.opener(ui, authinfo, **openerargs)
4813
4817
4814 if opts[b'peer'] == b'raw':
4818 if opts[b'peer'] == b'raw':
4815 ui.write(_(b'using raw connection to peer\n'))
4819 ui.write(_(b'using raw connection to peer\n'))
4816 peer = None
4820 peer = None
4817 elif opts[b'peer']:
4821 elif opts[b'peer']:
4818 raise error.Abort(
4822 raise error.Abort(
4819 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4823 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4820 )
4824 )
4821 else:
4825 else:
4822 peer = httppeer.makepeer(ui, path, opener=opener)
4826 peer = httppeer.makepeer(ui, path, opener=opener)
4823
4827
4824 # We /could/ populate stdin/stdout with sock.makefile()...
4828 # We /could/ populate stdin/stdout with sock.makefile()...
4825 else:
4829 else:
4826 raise error.Abort(_(b'unsupported connection configuration'))
4830 raise error.Abort(_(b'unsupported connection configuration'))
4827
4831
4828 batchedcommands = None
4832 batchedcommands = None
4829
4833
4830 # Now perform actions based on the parsed wire language instructions.
4834 # Now perform actions based on the parsed wire language instructions.
4831 for action, lines in blocks:
4835 for action, lines in blocks:
4832 if action in (b'raw', b'raw+'):
4836 if action in (b'raw', b'raw+'):
4833 if not stdin:
4837 if not stdin:
4834 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4838 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4835
4839
4836 # Concatenate the data together.
4840 # Concatenate the data together.
4837 data = b''.join(l.lstrip() for l in lines)
4841 data = b''.join(l.lstrip() for l in lines)
4838 data = stringutil.unescapestr(data)
4842 data = stringutil.unescapestr(data)
4839 stdin.write(data)
4843 stdin.write(data)
4840
4844
4841 if action == b'raw+':
4845 if action == b'raw+':
4842 stdin.flush()
4846 stdin.flush()
4843 elif action == b'flush':
4847 elif action == b'flush':
4844 if not stdin:
4848 if not stdin:
4845 raise error.Abort(_(b'cannot call flush on this peer'))
4849 raise error.Abort(_(b'cannot call flush on this peer'))
4846 stdin.flush()
4850 stdin.flush()
4847 elif action.startswith(b'command'):
4851 elif action.startswith(b'command'):
4848 if not peer:
4852 if not peer:
4849 raise error.Abort(
4853 raise error.Abort(
4850 _(
4854 _(
4851 b'cannot send commands unless peer instance '
4855 b'cannot send commands unless peer instance '
4852 b'is available'
4856 b'is available'
4853 )
4857 )
4854 )
4858 )
4855
4859
4856 command = action.split(b' ', 1)[1]
4860 command = action.split(b' ', 1)[1]
4857
4861
4858 args = {}
4862 args = {}
4859 for line in lines:
4863 for line in lines:
4860 # We need to allow empty values.
4864 # We need to allow empty values.
4861 fields = line.lstrip().split(b' ', 1)
4865 fields = line.lstrip().split(b' ', 1)
4862 if len(fields) == 1:
4866 if len(fields) == 1:
4863 key = fields[0]
4867 key = fields[0]
4864 value = b''
4868 value = b''
4865 else:
4869 else:
4866 key, value = fields
4870 key, value = fields
4867
4871
4868 if value.startswith(b'eval:'):
4872 if value.startswith(b'eval:'):
4869 value = stringutil.evalpythonliteral(value[5:])
4873 value = stringutil.evalpythonliteral(value[5:])
4870 else:
4874 else:
4871 value = stringutil.unescapestr(value)
4875 value = stringutil.unescapestr(value)
4872
4876
4873 args[key] = value
4877 args[key] = value
4874
4878
4875 if batchedcommands is not None:
4879 if batchedcommands is not None:
4876 batchedcommands.append((command, args))
4880 batchedcommands.append((command, args))
4877 continue
4881 continue
4878
4882
4879 ui.status(_(b'sending %s command\n') % command)
4883 ui.status(_(b'sending %s command\n') % command)
4880
4884
4881 if b'PUSHFILE' in args:
4885 if b'PUSHFILE' in args:
4882 with open(args[b'PUSHFILE'], 'rb') as fh:
4886 with open(args[b'PUSHFILE'], 'rb') as fh:
4883 del args[b'PUSHFILE']
4887 del args[b'PUSHFILE']
4884 res, output = peer._callpush(
4888 res, output = peer._callpush(
4885 command, fh, **pycompat.strkwargs(args)
4889 command, fh, **pycompat.strkwargs(args)
4886 )
4890 )
4887 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4891 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4888 ui.status(
4892 ui.status(
4889 _(b'remote output: %s\n') % stringutil.escapestr(output)
4893 _(b'remote output: %s\n') % stringutil.escapestr(output)
4890 )
4894 )
4891 else:
4895 else:
4892 with peer.commandexecutor() as e:
4896 with peer.commandexecutor() as e:
4893 res = e.callcommand(command, args).result()
4897 res = e.callcommand(command, args).result()
4894
4898
4895 ui.status(
4899 ui.status(
4896 _(b'response: %s\n')
4900 _(b'response: %s\n')
4897 % stringutil.pprint(res, bprefix=True, indent=2)
4901 % stringutil.pprint(res, bprefix=True, indent=2)
4898 )
4902 )
4899
4903
4900 elif action == b'batchbegin':
4904 elif action == b'batchbegin':
4901 if batchedcommands is not None:
4905 if batchedcommands is not None:
4902 raise error.Abort(_(b'nested batchbegin not allowed'))
4906 raise error.Abort(_(b'nested batchbegin not allowed'))
4903
4907
4904 batchedcommands = []
4908 batchedcommands = []
4905 elif action == b'batchsubmit':
4909 elif action == b'batchsubmit':
4906 # There is a batching API we could go through. But it would be
4910 # There is a batching API we could go through. But it would be
4907 # difficult to normalize requests into function calls. It is easier
4911 # difficult to normalize requests into function calls. It is easier
4908 # to bypass this layer and normalize to commands + args.
4912 # to bypass this layer and normalize to commands + args.
4909 ui.status(
4913 ui.status(
4910 _(b'sending batch with %d sub-commands\n')
4914 _(b'sending batch with %d sub-commands\n')
4911 % len(batchedcommands)
4915 % len(batchedcommands)
4912 )
4916 )
4913 assert peer is not None
4917 assert peer is not None
4914 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4918 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4915 ui.status(
4919 ui.status(
4916 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4920 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4917 )
4921 )
4918
4922
4919 batchedcommands = None
4923 batchedcommands = None
4920
4924
4921 elif action.startswith(b'httprequest '):
4925 elif action.startswith(b'httprequest '):
4922 if not opener:
4926 if not opener:
4923 raise error.Abort(
4927 raise error.Abort(
4924 _(b'cannot use httprequest without an HTTP peer')
4928 _(b'cannot use httprequest without an HTTP peer')
4925 )
4929 )
4926
4930
4927 request = action.split(b' ', 2)
4931 request = action.split(b' ', 2)
4928 if len(request) != 3:
4932 if len(request) != 3:
4929 raise error.Abort(
4933 raise error.Abort(
4930 _(
4934 _(
4931 b'invalid httprequest: expected format is '
4935 b'invalid httprequest: expected format is '
4932 b'"httprequest <method> <path>'
4936 b'"httprequest <method> <path>'
4933 )
4937 )
4934 )
4938 )
4935
4939
4936 method, httppath = request[1:]
4940 method, httppath = request[1:]
4937 headers = {}
4941 headers = {}
4938 body = None
4942 body = None
4939 frames = []
4943 frames = []
4940 for line in lines:
4944 for line in lines:
4941 line = line.lstrip()
4945 line = line.lstrip()
4942 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4946 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4943 if m:
4947 if m:
4944 # Headers need to use native strings.
4948 # Headers need to use native strings.
4945 key = pycompat.strurl(m.group(1))
4949 key = pycompat.strurl(m.group(1))
4946 value = pycompat.strurl(m.group(2))
4950 value = pycompat.strurl(m.group(2))
4947 headers[key] = value
4951 headers[key] = value
4948 continue
4952 continue
4949
4953
4950 if line.startswith(b'BODYFILE '):
4954 if line.startswith(b'BODYFILE '):
4951 with open(line.split(b' ', 1), b'rb') as fh:
4955 with open(line.split(b' ', 1), b'rb') as fh:
4952 body = fh.read()
4956 body = fh.read()
4953 elif line.startswith(b'frame '):
4957 elif line.startswith(b'frame '):
4954 frame = wireprotoframing.makeframefromhumanstring(
4958 frame = wireprotoframing.makeframefromhumanstring(
4955 line[len(b'frame ') :]
4959 line[len(b'frame ') :]
4956 )
4960 )
4957
4961
4958 frames.append(frame)
4962 frames.append(frame)
4959 else:
4963 else:
4960 raise error.Abort(
4964 raise error.Abort(
4961 _(b'unknown argument to httprequest: %s') % line
4965 _(b'unknown argument to httprequest: %s') % line
4962 )
4966 )
4963
4967
4964 url = path + httppath
4968 url = path + httppath
4965
4969
4966 if frames:
4970 if frames:
4967 body = b''.join(bytes(f) for f in frames)
4971 body = b''.join(bytes(f) for f in frames)
4968
4972
4969 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4973 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4970
4974
4971 # urllib.Request insists on using has_data() as a proxy for
4975 # urllib.Request insists on using has_data() as a proxy for
4972 # determining the request method. Override that to use our
4976 # determining the request method. Override that to use our
4973 # explicitly requested method.
4977 # explicitly requested method.
4974 req.get_method = lambda: pycompat.sysstr(method)
4978 req.get_method = lambda: pycompat.sysstr(method)
4975
4979
4976 try:
4980 try:
4977 res = opener.open(req)
4981 res = opener.open(req)
4978 body = res.read()
4982 body = res.read()
4979 except util.urlerr.urlerror as e:
4983 except util.urlerr.urlerror as e:
4980 # read() method must be called, but only exists in Python 2
4984 # read() method must be called, but only exists in Python 2
4981 getattr(e, 'read', lambda: None)()
4985 getattr(e, 'read', lambda: None)()
4982 continue
4986 continue
4983
4987
4984 ct = res.headers.get('Content-Type')
4988 ct = res.headers.get('Content-Type')
4985 if ct == 'application/mercurial-cbor':
4989 if ct == 'application/mercurial-cbor':
4986 ui.write(
4990 ui.write(
4987 _(b'cbor> %s\n')
4991 _(b'cbor> %s\n')
4988 % stringutil.pprint(
4992 % stringutil.pprint(
4989 cborutil.decodeall(body), bprefix=True, indent=2
4993 cborutil.decodeall(body), bprefix=True, indent=2
4990 )
4994 )
4991 )
4995 )
4992
4996
4993 elif action == b'close':
4997 elif action == b'close':
4994 assert peer is not None
4998 assert peer is not None
4995 peer.close()
4999 peer.close()
4996 elif action == b'readavailable':
5000 elif action == b'readavailable':
4997 if not stdout or not stderr:
5001 if not stdout or not stderr:
4998 raise error.Abort(
5002 raise error.Abort(
4999 _(b'readavailable not available on this peer')
5003 _(b'readavailable not available on this peer')
5000 )
5004 )
5001
5005
5002 stdin.close()
5006 stdin.close()
5003 stdout.read()
5007 stdout.read()
5004 stderr.read()
5008 stderr.read()
5005
5009
5006 elif action == b'readline':
5010 elif action == b'readline':
5007 if not stdout:
5011 if not stdout:
5008 raise error.Abort(_(b'readline not available on this peer'))
5012 raise error.Abort(_(b'readline not available on this peer'))
5009 stdout.readline()
5013 stdout.readline()
5010 elif action == b'ereadline':
5014 elif action == b'ereadline':
5011 if not stderr:
5015 if not stderr:
5012 raise error.Abort(_(b'ereadline not available on this peer'))
5016 raise error.Abort(_(b'ereadline not available on this peer'))
5013 stderr.readline()
5017 stderr.readline()
5014 elif action.startswith(b'read '):
5018 elif action.startswith(b'read '):
5015 count = int(action.split(b' ', 1)[1])
5019 count = int(action.split(b' ', 1)[1])
5016 if not stdout:
5020 if not stdout:
5017 raise error.Abort(_(b'read not available on this peer'))
5021 raise error.Abort(_(b'read not available on this peer'))
5018 stdout.read(count)
5022 stdout.read(count)
5019 elif action.startswith(b'eread '):
5023 elif action.startswith(b'eread '):
5020 count = int(action.split(b' ', 1)[1])
5024 count = int(action.split(b' ', 1)[1])
5021 if not stderr:
5025 if not stderr:
5022 raise error.Abort(_(b'eread not available on this peer'))
5026 raise error.Abort(_(b'eread not available on this peer'))
5023 stderr.read(count)
5027 stderr.read(count)
5024 else:
5028 else:
5025 raise error.Abort(_(b'unknown action: %s') % action)
5029 raise error.Abort(_(b'unknown action: %s') % action)
5026
5030
5027 if batchedcommands is not None:
5031 if batchedcommands is not None:
5028 raise error.Abort(_(b'unclosed "batchbegin" request'))
5032 raise error.Abort(_(b'unclosed "batchbegin" request'))
5029
5033
5030 if peer:
5034 if peer:
5031 peer.close()
5035 peer.close()
5032
5036
5033 if proc:
5037 if proc:
5034 proc.kill()
5038 proc.kill()
@@ -1,193 +1,198 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10
10
11 from .i18n import _
11 from .i18n import _
12 from .node import short
12 from .node import short
13 from . import (
13 from . import (
14 error,
14 error,
15 )
15 )
16
16
17
17
18 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
18 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
19 """Return a tuple (common, fetch, heads) used to identify the common
19 """Return a tuple (common, fetch, heads) used to identify the common
20 subset of nodes between repo and remote.
20 subset of nodes between repo and remote.
21
21
22 "common" is a list of (at least) the heads of the common subset.
22 "common" is a list of (at least) the heads of the common subset.
23 "fetch" is a list of roots of the nodes that would be incoming, to be
23 "fetch" is a list of roots of the nodes that would be incoming, to be
24 supplied to changegroupsubset.
24 supplied to changegroupsubset.
25 "heads" is either the supplied heads, or else the remote's heads.
25 "heads" is either the supplied heads, or else the remote's heads.
26 """
26 """
27
27
28 knownnode = repo.changelog.hasnode
28 knownnode = repo.changelog.hasnode
29 search = []
29 search = []
30 fetch = set()
30 fetch = set()
31 seen = set()
31 seen = set()
32 seenbranch = set()
32 seenbranch = set()
33 base = set()
33 base = set()
34
34
35 if not heads:
35 if not heads:
36 with remote.commandexecutor() as e:
36 with remote.commandexecutor() as e:
37 heads = e.callcommand(b'heads', {}).result()
37 heads = e.callcommand(b'heads', {}).result()
38
38
39 if audit is not None:
39 if audit is not None:
40 audit[b'total-roundtrips'] = 1
40 audit[b'total-roundtrips'] = 1
41 audit[b'total-queries'] = 0
41 audit[b'total-queries'] = 0
42 audit[b'total-queries-branches'] = 0
43 audit[b'total-queries-between'] = 0
42
44
43 if repo.changelog.tip() == repo.nullid:
45 if repo.changelog.tip() == repo.nullid:
44 base.add(repo.nullid)
46 base.add(repo.nullid)
45 if heads != [repo.nullid]:
47 if heads != [repo.nullid]:
46 return [repo.nullid], [repo.nullid], list(heads)
48 return [repo.nullid], [repo.nullid], list(heads)
47 return [repo.nullid], [], heads
49 return [repo.nullid], [], heads
48
50
49 # assume we're closer to the tip than the root
51 # assume we're closer to the tip than the root
50 # and start by examining the heads
52 # and start by examining the heads
51 repo.ui.status(_(b"searching for changes\n"))
53 repo.ui.status(_(b"searching for changes\n"))
52
54
53 unknown = []
55 unknown = []
54 for h in heads:
56 for h in heads:
55 if not knownnode(h):
57 if not knownnode(h):
56 unknown.append(h)
58 unknown.append(h)
57 else:
59 else:
58 base.add(h)
60 base.add(h)
59
61
60 if not unknown:
62 if not unknown:
61 return list(base), [], list(heads)
63 return list(base), [], list(heads)
62
64
63 req = set(unknown)
65 req = set(unknown)
64 reqcnt = 0
66 reqcnt = 0
65 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
67 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
66
68
67 # search through remote branches
69 # search through remote branches
68 # a 'branch' here is a linear segment of history, with four parts:
70 # a 'branch' here is a linear segment of history, with four parts:
69 # head, root, first parent, second parent
71 # head, root, first parent, second parent
70 # (a branch always has two parents (or none) by definition)
72 # (a branch always has two parents (or none) by definition)
71 with remote.commandexecutor() as e:
73 with remote.commandexecutor() as e:
72 if audit is not None:
74 if audit is not None:
73 audit[b'total-queries'] += len(unknown)
75 audit[b'total-queries'] += len(unknown)
76 audit[b'total-queries-branches'] += len(unknown)
74 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
77 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
75
78
76 unknown = collections.deque(branches)
79 unknown = collections.deque(branches)
77 while unknown:
80 while unknown:
78 r = []
81 r = []
79 while unknown:
82 while unknown:
80 n = unknown.popleft()
83 n = unknown.popleft()
81 if n[0] in seen:
84 if n[0] in seen:
82 continue
85 continue
83
86
84 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
87 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
85 if n[0] == repo.nullid: # found the end of the branch
88 if n[0] == repo.nullid: # found the end of the branch
86 pass
89 pass
87 elif n in seenbranch:
90 elif n in seenbranch:
88 repo.ui.debug(b"branch already found\n")
91 repo.ui.debug(b"branch already found\n")
89 continue
92 continue
90 elif n[1] and knownnode(n[1]): # do we know the base?
93 elif n[1] and knownnode(n[1]): # do we know the base?
91 repo.ui.debug(
94 repo.ui.debug(
92 b"found incomplete branch %s:%s\n"
95 b"found incomplete branch %s:%s\n"
93 % (short(n[0]), short(n[1]))
96 % (short(n[0]), short(n[1]))
94 )
97 )
95 search.append(n[0:2]) # schedule branch range for scanning
98 search.append(n[0:2]) # schedule branch range for scanning
96 seenbranch.add(n)
99 seenbranch.add(n)
97 else:
100 else:
98 if n[1] not in seen and n[1] not in fetch:
101 if n[1] not in seen and n[1] not in fetch:
99 if knownnode(n[2]) and knownnode(n[3]):
102 if knownnode(n[2]) and knownnode(n[3]):
100 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
103 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
101 fetch.add(n[1]) # earliest unknown
104 fetch.add(n[1]) # earliest unknown
102 for p in n[2:4]:
105 for p in n[2:4]:
103 if knownnode(p):
106 if knownnode(p):
104 base.add(p) # latest known
107 base.add(p) # latest known
105
108
106 for p in n[2:4]:
109 for p in n[2:4]:
107 if p not in req and not knownnode(p):
110 if p not in req and not knownnode(p):
108 r.append(p)
111 r.append(p)
109 req.add(p)
112 req.add(p)
110 seen.add(n[0])
113 seen.add(n[0])
111
114
112 if r:
115 if r:
113 reqcnt += 1
116 reqcnt += 1
114 progress.increment()
117 progress.increment()
115 repo.ui.debug(
118 repo.ui.debug(
116 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
119 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
117 )
120 )
118 for p in range(0, len(r), 10):
121 for p in range(0, len(r), 10):
119 with remote.commandexecutor() as e:
122 with remote.commandexecutor() as e:
120 subset = r[p : p + 10]
123 subset = r[p : p + 10]
121 if audit is not None:
124 if audit is not None:
122 audit[b'total-queries'] += len(subset)
125 audit[b'total-queries'] += len(subset)
126 audit[b'total-queries-branches'] += len(subset)
123 branches = e.callcommand(
127 branches = e.callcommand(
124 b'branches',
128 b'branches',
125 {
129 {
126 b'nodes': subset,
130 b'nodes': subset,
127 },
131 },
128 ).result()
132 ).result()
129
133
130 for b in branches:
134 for b in branches:
131 repo.ui.debug(
135 repo.ui.debug(
132 b"received %s:%s\n" % (short(b[0]), short(b[1]))
136 b"received %s:%s\n" % (short(b[0]), short(b[1]))
133 )
137 )
134 unknown.append(b)
138 unknown.append(b)
135
139
136 # do binary search on the branches we found
140 # do binary search on the branches we found
137 while search:
141 while search:
138 newsearch = []
142 newsearch = []
139 reqcnt += 1
143 reqcnt += 1
140 progress.increment()
144 progress.increment()
141
145
142 with remote.commandexecutor() as e:
146 with remote.commandexecutor() as e:
143 if audit is not None:
147 if audit is not None:
144 audit[b'total-queries'] += len(search)
148 audit[b'total-queries'] += len(search)
149 audit[b'total-queries-between'] += len(search)
145 between = e.callcommand(b'between', {b'pairs': search}).result()
150 between = e.callcommand(b'between', {b'pairs': search}).result()
146
151
147 for n, l in zip(search, between):
152 for n, l in zip(search, between):
148 l.append(n[1])
153 l.append(n[1])
149 p = n[0]
154 p = n[0]
150 f = 1
155 f = 1
151 for i in l:
156 for i in l:
152 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
157 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
153 if knownnode(i):
158 if knownnode(i):
154 if f <= 2:
159 if f <= 2:
155 repo.ui.debug(
160 repo.ui.debug(
156 b"found new branch changeset %s\n" % short(p)
161 b"found new branch changeset %s\n" % short(p)
157 )
162 )
158 fetch.add(p)
163 fetch.add(p)
159 base.add(i)
164 base.add(i)
160 else:
165 else:
161 repo.ui.debug(
166 repo.ui.debug(
162 b"narrowed branch search to %s:%s\n"
167 b"narrowed branch search to %s:%s\n"
163 % (short(p), short(i))
168 % (short(p), short(i))
164 )
169 )
165 newsearch.append((p, i))
170 newsearch.append((p, i))
166 break
171 break
167 p, f = i, f * 2
172 p, f = i, f * 2
168 search = newsearch
173 search = newsearch
169
174
170 # sanity check our fetch list
175 # sanity check our fetch list
171 for f in fetch:
176 for f in fetch:
172 if knownnode(f):
177 if knownnode(f):
173 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
178 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
174
179
175 base = list(base)
180 base = list(base)
176 if base == [repo.nullid]:
181 if base == [repo.nullid]:
177 if force:
182 if force:
178 repo.ui.warn(_(b"warning: repository is unrelated\n"))
183 repo.ui.warn(_(b"warning: repository is unrelated\n"))
179 else:
184 else:
180 raise error.Abort(_(b"repository is unrelated"))
185 raise error.Abort(_(b"repository is unrelated"))
181
186
182 repo.ui.debug(
187 repo.ui.debug(
183 b"found new changesets starting at "
188 b"found new changesets starting at "
184 + b" ".join([short(f) for f in fetch])
189 + b" ".join([short(f) for f in fetch])
185 + b"\n"
190 + b"\n"
186 )
191 )
187
192
188 progress.complete()
193 progress.complete()
189 repo.ui.debug(b"%d total queries\n" % reqcnt)
194 repo.ui.debug(b"%d total queries\n" % reqcnt)
190 if audit is not None:
195 if audit is not None:
191 audit[b'total-roundtrips'] = reqcnt
196 audit[b'total-roundtrips'] = reqcnt
192
197
193 return base, list(fetch), heads
198 return base, list(fetch), heads
@@ -1,1806 +1,1830 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 elapsed time: * seconds (glob)
47 round-trips: 2
47 round-trips: 2
48 queries: 6
48 queries: 6
49 queries-branches: 2
50 queries-between: 4
49 heads summary:
51 heads summary:
50 total common heads: 2
52 total common heads: 2
51 also local heads: 2
53 also local heads: 2
52 also remote heads: 1
54 also remote heads: 1
53 both: 1
55 both: 1
54 local heads: 2
56 local heads: 2
55 common: 2
57 common: 2
56 missing: 0
58 missing: 0
57 remote heads: 3
59 remote heads: 3
58 common: 1
60 common: 1
59 unknown: 2
61 unknown: 2
60 local changesets: 7
62 local changesets: 7
61 common: 7
63 common: 7
62 heads: 2
64 heads: 2
63 roots: 1
65 roots: 1
64 missing: 0
66 missing: 0
65 heads: 0
67 heads: 0
66 roots: 0
68 roots: 0
67 first undecided set: 3
69 first undecided set: 3
68 heads: 1
70 heads: 1
69 roots: 1
71 roots: 1
70 common: 3
72 common: 3
71 missing: 0
73 missing: 0
72 common heads: 01241442b3c2 b5714e113bc0
74 common heads: 01241442b3c2 b5714e113bc0
73
75
74 % -- a -> b set
76 % -- a -> b set
75 comparing with b
77 comparing with b
76 query 1; heads
78 query 1; heads
77 searching for changes
79 searching for changes
78 all local changesets known remotely
80 all local changesets known remotely
79 elapsed time: * seconds (glob)
81 elapsed time: * seconds (glob)
80 round-trips: 1
82 round-trips: 1
81 queries: 2
83 queries: 2
82 heads summary:
84 heads summary:
83 total common heads: 2
85 total common heads: 2
84 also local heads: 2
86 also local heads: 2
85 also remote heads: 1
87 also remote heads: 1
86 both: 1
88 both: 1
87 local heads: 2
89 local heads: 2
88 common: 2
90 common: 2
89 missing: 0
91 missing: 0
90 remote heads: 3
92 remote heads: 3
91 common: 1
93 common: 1
92 unknown: 2
94 unknown: 2
93 local changesets: 7
95 local changesets: 7
94 common: 7
96 common: 7
95 heads: 2
97 heads: 2
96 roots: 1
98 roots: 1
97 missing: 0
99 missing: 0
98 heads: 0
100 heads: 0
99 roots: 0
101 roots: 0
100 first undecided set: 3
102 first undecided set: 3
101 heads: 1
103 heads: 1
102 roots: 1
104 roots: 1
103 common: 3
105 common: 3
104 missing: 0
106 missing: 0
105 common heads: 01241442b3c2 b5714e113bc0
107 common heads: 01241442b3c2 b5714e113bc0
106
108
107 % -- a -> b set (tip only)
109 % -- a -> b set (tip only)
108 comparing with b
110 comparing with b
109 query 1; heads
111 query 1; heads
110 searching for changes
112 searching for changes
111 all local changesets known remotely
113 all local changesets known remotely
112 elapsed time: * seconds (glob)
114 elapsed time: * seconds (glob)
113 round-trips: 1
115 round-trips: 1
114 queries: 1
116 queries: 1
115 heads summary:
117 heads summary:
116 total common heads: 1
118 total common heads: 1
117 also local heads: 1
119 also local heads: 1
118 also remote heads: 0
120 also remote heads: 0
119 both: 0
121 both: 0
120 local heads: 2
122 local heads: 2
121 common: 1
123 common: 1
122 missing: 1
124 missing: 1
123 remote heads: 3
125 remote heads: 3
124 common: 0
126 common: 0
125 unknown: 3
127 unknown: 3
126 local changesets: 7
128 local changesets: 7
127 common: 6
129 common: 6
128 heads: 1
130 heads: 1
129 roots: 1
131 roots: 1
130 missing: 1
132 missing: 1
131 heads: 1
133 heads: 1
132 roots: 1
134 roots: 1
133 first undecided set: 6
135 first undecided set: 6
134 heads: 2
136 heads: 2
135 roots: 1
137 roots: 1
136 common: 5
138 common: 5
137 missing: 1
139 missing: 1
138 common heads: b5714e113bc0
140 common heads: b5714e113bc0
139
141
140 % -- b -> a tree
142 % -- b -> a tree
141 comparing with a
143 comparing with a
142 searching for changes
144 searching for changes
143 unpruned common: 01241442b3c2 b5714e113bc0
145 unpruned common: 01241442b3c2 b5714e113bc0
144 elapsed time: * seconds (glob)
146 elapsed time: * seconds (glob)
145 round-trips: 1
147 round-trips: 1
146 queries: 0
148 queries: 0
149 queries-branches: 0
150 queries-between: 0
147 heads summary:
151 heads summary:
148 total common heads: 2
152 total common heads: 2
149 also local heads: 1
153 also local heads: 1
150 also remote heads: 2
154 also remote heads: 2
151 both: 1
155 both: 1
152 local heads: 3
156 local heads: 3
153 common: 1
157 common: 1
154 missing: 2
158 missing: 2
155 remote heads: 2
159 remote heads: 2
156 common: 2
160 common: 2
157 unknown: 0
161 unknown: 0
158 local changesets: 15
162 local changesets: 15
159 common: 7
163 common: 7
160 heads: 2
164 heads: 2
161 roots: 1
165 roots: 1
162 missing: 8
166 missing: 8
163 heads: 2
167 heads: 2
164 roots: 2
168 roots: 2
165 first undecided set: 8
169 first undecided set: 8
166 heads: 2
170 heads: 2
167 roots: 2
171 roots: 2
168 common: 0
172 common: 0
169 missing: 8
173 missing: 8
170 common heads: 01241442b3c2 b5714e113bc0
174 common heads: 01241442b3c2 b5714e113bc0
171
175
172 % -- b -> a set
176 % -- b -> a set
173 comparing with a
177 comparing with a
174 query 1; heads
178 query 1; heads
175 searching for changes
179 searching for changes
176 all remote heads known locally
180 all remote heads known locally
177 elapsed time: * seconds (glob)
181 elapsed time: * seconds (glob)
178 round-trips: 1
182 round-trips: 1
179 queries: 3
183 queries: 3
180 heads summary:
184 heads summary:
181 total common heads: 2
185 total common heads: 2
182 also local heads: 1
186 also local heads: 1
183 also remote heads: 2
187 also remote heads: 2
184 both: 1
188 both: 1
185 local heads: 3
189 local heads: 3
186 common: 1
190 common: 1
187 missing: 2
191 missing: 2
188 remote heads: 2
192 remote heads: 2
189 common: 2
193 common: 2
190 unknown: 0
194 unknown: 0
191 local changesets: 15
195 local changesets: 15
192 common: 7
196 common: 7
193 heads: 2
197 heads: 2
194 roots: 1
198 roots: 1
195 missing: 8
199 missing: 8
196 heads: 2
200 heads: 2
197 roots: 2
201 roots: 2
198 first undecided set: 8
202 first undecided set: 8
199 heads: 2
203 heads: 2
200 roots: 2
204 roots: 2
201 common: 0
205 common: 0
202 missing: 8
206 missing: 8
203 common heads: 01241442b3c2 b5714e113bc0
207 common heads: 01241442b3c2 b5714e113bc0
204
208
205 % -- b -> a set (tip only)
209 % -- b -> a set (tip only)
206 comparing with a
210 comparing with a
207 query 1; heads
211 query 1; heads
208 searching for changes
212 searching for changes
209 all remote heads known locally
213 all remote heads known locally
210 elapsed time: * seconds (glob)
214 elapsed time: * seconds (glob)
211 round-trips: 1
215 round-trips: 1
212 queries: 1
216 queries: 1
213 heads summary:
217 heads summary:
214 total common heads: 2
218 total common heads: 2
215 also local heads: 1
219 also local heads: 1
216 also remote heads: 2
220 also remote heads: 2
217 both: 1
221 both: 1
218 local heads: 3
222 local heads: 3
219 common: 1
223 common: 1
220 missing: 2
224 missing: 2
221 remote heads: 2
225 remote heads: 2
222 common: 2
226 common: 2
223 unknown: 0
227 unknown: 0
224 local changesets: 15
228 local changesets: 15
225 common: 7
229 common: 7
226 heads: 2
230 heads: 2
227 roots: 1
231 roots: 1
228 missing: 8
232 missing: 8
229 heads: 2
233 heads: 2
230 roots: 2
234 roots: 2
231 first undecided set: 8
235 first undecided set: 8
232 heads: 2
236 heads: 2
233 roots: 2
237 roots: 2
234 common: 0
238 common: 0
235 missing: 8
239 missing: 8
236 common heads: 01241442b3c2 b5714e113bc0
240 common heads: 01241442b3c2 b5714e113bc0
237
241
238
242
239 Many new:
243 Many new:
240
244
241 $ testdesc '-ra1 -ra2' '-rb' '
245 $ testdesc '-ra1 -ra2' '-rb' '
242 > +2:f +3:a1 +3:b
246 > +2:f +3:a1 +3:b
243 > <f +30 :a2'
247 > <f +30 :a2'
244
248
245 % -- a -> b tree
249 % -- a -> b tree
246 comparing with b
250 comparing with b
247 searching for changes
251 searching for changes
248 unpruned common: bebd167eb94d
252 unpruned common: bebd167eb94d
249 elapsed time: * seconds (glob)
253 elapsed time: * seconds (glob)
250 round-trips: 2
254 round-trips: 2
251 queries: 3
255 queries: 3
256 queries-branches: 1
257 queries-between: 2
252 heads summary:
258 heads summary:
253 total common heads: 1
259 total common heads: 1
254 also local heads: 1
260 also local heads: 1
255 also remote heads: 0
261 also remote heads: 0
256 both: 0
262 both: 0
257 local heads: 2
263 local heads: 2
258 common: 1
264 common: 1
259 missing: 1
265 missing: 1
260 remote heads: 1
266 remote heads: 1
261 common: 0
267 common: 0
262 unknown: 1
268 unknown: 1
263 local changesets: 35
269 local changesets: 35
264 common: 5
270 common: 5
265 heads: 1
271 heads: 1
266 roots: 1
272 roots: 1
267 missing: 30
273 missing: 30
268 heads: 1
274 heads: 1
269 roots: 1
275 roots: 1
270 first undecided set: 34
276 first undecided set: 34
271 heads: 2
277 heads: 2
272 roots: 1
278 roots: 1
273 common: 4
279 common: 4
274 missing: 30
280 missing: 30
275 common heads: bebd167eb94d
281 common heads: bebd167eb94d
276
282
277 % -- a -> b set
283 % -- a -> b set
278 comparing with b
284 comparing with b
279 query 1; heads
285 query 1; heads
280 searching for changes
286 searching for changes
281 taking initial sample
287 taking initial sample
282 searching: 2 queries
288 searching: 2 queries
283 query 2; still undecided: 29, sample size is: 29
289 query 2; still undecided: 29, sample size is: 29
284 2 total queries in *.????s (glob)
290 2 total queries in *.????s (glob)
285 elapsed time: * seconds (glob)
291 elapsed time: * seconds (glob)
286 round-trips: 2
292 round-trips: 2
287 queries: 31
293 queries: 31
288 heads summary:
294 heads summary:
289 total common heads: 1
295 total common heads: 1
290 also local heads: 1
296 also local heads: 1
291 also remote heads: 0
297 also remote heads: 0
292 both: 0
298 both: 0
293 local heads: 2
299 local heads: 2
294 common: 1
300 common: 1
295 missing: 1
301 missing: 1
296 remote heads: 1
302 remote heads: 1
297 common: 0
303 common: 0
298 unknown: 1
304 unknown: 1
299 local changesets: 35
305 local changesets: 35
300 common: 5
306 common: 5
301 heads: 1
307 heads: 1
302 roots: 1
308 roots: 1
303 missing: 30
309 missing: 30
304 heads: 1
310 heads: 1
305 roots: 1
311 roots: 1
306 first undecided set: 34
312 first undecided set: 34
307 heads: 2
313 heads: 2
308 roots: 1
314 roots: 1
309 common: 4
315 common: 4
310 missing: 30
316 missing: 30
311 common heads: bebd167eb94d
317 common heads: bebd167eb94d
312
318
313 % -- a -> b set (tip only)
319 % -- a -> b set (tip only)
314 comparing with b
320 comparing with b
315 query 1; heads
321 query 1; heads
316 searching for changes
322 searching for changes
317 taking quick initial sample
323 taking quick initial sample
318 searching: 2 queries
324 searching: 2 queries
319 query 2; still undecided: 31, sample size is: 31
325 query 2; still undecided: 31, sample size is: 31
320 2 total queries in *.????s (glob)
326 2 total queries in *.????s (glob)
321 elapsed time: * seconds (glob)
327 elapsed time: * seconds (glob)
322 round-trips: 2
328 round-trips: 2
323 queries: 32
329 queries: 32
324 heads summary:
330 heads summary:
325 total common heads: 1
331 total common heads: 1
326 also local heads: 0
332 also local heads: 0
327 also remote heads: 0
333 also remote heads: 0
328 both: 0
334 both: 0
329 local heads: 2
335 local heads: 2
330 common: 0
336 common: 0
331 missing: 2
337 missing: 2
332 remote heads: 1
338 remote heads: 1
333 common: 0
339 common: 0
334 unknown: 1
340 unknown: 1
335 local changesets: 35
341 local changesets: 35
336 common: 2
342 common: 2
337 heads: 1
343 heads: 1
338 roots: 1
344 roots: 1
339 missing: 33
345 missing: 33
340 heads: 2
346 heads: 2
341 roots: 2
347 roots: 2
342 first undecided set: 35
348 first undecided set: 35
343 heads: 2
349 heads: 2
344 roots: 1
350 roots: 1
345 common: 2
351 common: 2
346 missing: 33
352 missing: 33
347 common heads: 66f7d451a68b
353 common heads: 66f7d451a68b
348
354
349 % -- b -> a tree
355 % -- b -> a tree
350 comparing with a
356 comparing with a
351 searching for changes
357 searching for changes
352 unpruned common: 66f7d451a68b bebd167eb94d
358 unpruned common: 66f7d451a68b bebd167eb94d
353 elapsed time: * seconds (glob)
359 elapsed time: * seconds (glob)
354 round-trips: 4
360 round-trips: 4
355 queries: 5
361 queries: 5
362 queries-branches: 1
363 queries-between: 4
356 heads summary:
364 heads summary:
357 total common heads: 1
365 total common heads: 1
358 also local heads: 0
366 also local heads: 0
359 also remote heads: 1
367 also remote heads: 1
360 both: 0
368 both: 0
361 local heads: 1
369 local heads: 1
362 common: 0
370 common: 0
363 missing: 1
371 missing: 1
364 remote heads: 2
372 remote heads: 2
365 common: 1
373 common: 1
366 unknown: 1
374 unknown: 1
367 local changesets: 8
375 local changesets: 8
368 common: 5
376 common: 5
369 heads: 1
377 heads: 1
370 roots: 1
378 roots: 1
371 missing: 3
379 missing: 3
372 heads: 1
380 heads: 1
373 roots: 1
381 roots: 1
374 first undecided set: 3
382 first undecided set: 3
375 heads: 1
383 heads: 1
376 roots: 1
384 roots: 1
377 common: 0
385 common: 0
378 missing: 3
386 missing: 3
379 common heads: bebd167eb94d
387 common heads: bebd167eb94d
380
388
381 % -- b -> a set
389 % -- b -> a set
382 comparing with a
390 comparing with a
383 query 1; heads
391 query 1; heads
384 searching for changes
392 searching for changes
385 taking initial sample
393 taking initial sample
386 searching: 2 queries
394 searching: 2 queries
387 query 2; still undecided: 2, sample size is: 2
395 query 2; still undecided: 2, sample size is: 2
388 2 total queries in *.????s (glob)
396 2 total queries in *.????s (glob)
389 elapsed time: * seconds (glob)
397 elapsed time: * seconds (glob)
390 round-trips: 2
398 round-trips: 2
391 queries: 3
399 queries: 3
392 heads summary:
400 heads summary:
393 total common heads: 1
401 total common heads: 1
394 also local heads: 0
402 also local heads: 0
395 also remote heads: 1
403 also remote heads: 1
396 both: 0
404 both: 0
397 local heads: 1
405 local heads: 1
398 common: 0
406 common: 0
399 missing: 1
407 missing: 1
400 remote heads: 2
408 remote heads: 2
401 common: 1
409 common: 1
402 unknown: 1
410 unknown: 1
403 local changesets: 8
411 local changesets: 8
404 common: 5
412 common: 5
405 heads: 1
413 heads: 1
406 roots: 1
414 roots: 1
407 missing: 3
415 missing: 3
408 heads: 1
416 heads: 1
409 roots: 1
417 roots: 1
410 first undecided set: 3
418 first undecided set: 3
411 heads: 1
419 heads: 1
412 roots: 1
420 roots: 1
413 common: 0
421 common: 0
414 missing: 3
422 missing: 3
415 common heads: bebd167eb94d
423 common heads: bebd167eb94d
416
424
417 % -- b -> a set (tip only)
425 % -- b -> a set (tip only)
418 comparing with a
426 comparing with a
419 query 1; heads
427 query 1; heads
420 searching for changes
428 searching for changes
421 taking initial sample
429 taking initial sample
422 searching: 2 queries
430 searching: 2 queries
423 query 2; still undecided: 2, sample size is: 2
431 query 2; still undecided: 2, sample size is: 2
424 2 total queries in *.????s (glob)
432 2 total queries in *.????s (glob)
425 elapsed time: * seconds (glob)
433 elapsed time: * seconds (glob)
426 round-trips: 2
434 round-trips: 2
427 queries: 3
435 queries: 3
428 heads summary:
436 heads summary:
429 total common heads: 1
437 total common heads: 1
430 also local heads: 0
438 also local heads: 0
431 also remote heads: 1
439 also remote heads: 1
432 both: 0
440 both: 0
433 local heads: 1
441 local heads: 1
434 common: 0
442 common: 0
435 missing: 1
443 missing: 1
436 remote heads: 2
444 remote heads: 2
437 common: 1
445 common: 1
438 unknown: 1
446 unknown: 1
439 local changesets: 8
447 local changesets: 8
440 common: 5
448 common: 5
441 heads: 1
449 heads: 1
442 roots: 1
450 roots: 1
443 missing: 3
451 missing: 3
444 heads: 1
452 heads: 1
445 roots: 1
453 roots: 1
446 first undecided set: 3
454 first undecided set: 3
447 heads: 1
455 heads: 1
448 roots: 1
456 roots: 1
449 common: 0
457 common: 0
450 missing: 3
458 missing: 3
451 common heads: bebd167eb94d
459 common heads: bebd167eb94d
452
460
453 Both sides many new with stub:
461 Both sides many new with stub:
454
462
455 $ testdesc '-ra1 -ra2' '-rb' '
463 $ testdesc '-ra1 -ra2' '-rb' '
456 > +2:f +2:a1 +30 :b
464 > +2:f +2:a1 +30 :b
457 > <f +30 :a2'
465 > <f +30 :a2'
458
466
459 % -- a -> b tree
467 % -- a -> b tree
460 comparing with b
468 comparing with b
461 searching for changes
469 searching for changes
462 unpruned common: 2dc09a01254d
470 unpruned common: 2dc09a01254d
463 elapsed time: * seconds (glob)
471 elapsed time: * seconds (glob)
464 round-trips: 4
472 round-trips: 4
465 queries: 5
473 queries: 5
474 queries-branches: 1
475 queries-between: 4
466 heads summary:
476 heads summary:
467 total common heads: 1
477 total common heads: 1
468 also local heads: 1
478 also local heads: 1
469 also remote heads: 0
479 also remote heads: 0
470 both: 0
480 both: 0
471 local heads: 2
481 local heads: 2
472 common: 1
482 common: 1
473 missing: 1
483 missing: 1
474 remote heads: 1
484 remote heads: 1
475 common: 0
485 common: 0
476 unknown: 1
486 unknown: 1
477 local changesets: 34
487 local changesets: 34
478 common: 4
488 common: 4
479 heads: 1
489 heads: 1
480 roots: 1
490 roots: 1
481 missing: 30
491 missing: 30
482 heads: 1
492 heads: 1
483 roots: 1
493 roots: 1
484 first undecided set: 33
494 first undecided set: 33
485 heads: 2
495 heads: 2
486 roots: 1
496 roots: 1
487 common: 3
497 common: 3
488 missing: 30
498 missing: 30
489 common heads: 2dc09a01254d
499 common heads: 2dc09a01254d
490
500
491 % -- a -> b set
501 % -- a -> b set
492 comparing with b
502 comparing with b
493 query 1; heads
503 query 1; heads
494 searching for changes
504 searching for changes
495 taking initial sample
505 taking initial sample
496 searching: 2 queries
506 searching: 2 queries
497 query 2; still undecided: 29, sample size is: 29
507 query 2; still undecided: 29, sample size is: 29
498 2 total queries in *.????s (glob)
508 2 total queries in *.????s (glob)
499 elapsed time: * seconds (glob)
509 elapsed time: * seconds (glob)
500 round-trips: 2
510 round-trips: 2
501 queries: 31
511 queries: 31
502 heads summary:
512 heads summary:
503 total common heads: 1
513 total common heads: 1
504 also local heads: 1
514 also local heads: 1
505 also remote heads: 0
515 also remote heads: 0
506 both: 0
516 both: 0
507 local heads: 2
517 local heads: 2
508 common: 1
518 common: 1
509 missing: 1
519 missing: 1
510 remote heads: 1
520 remote heads: 1
511 common: 0
521 common: 0
512 unknown: 1
522 unknown: 1
513 local changesets: 34
523 local changesets: 34
514 common: 4
524 common: 4
515 heads: 1
525 heads: 1
516 roots: 1
526 roots: 1
517 missing: 30
527 missing: 30
518 heads: 1
528 heads: 1
519 roots: 1
529 roots: 1
520 first undecided set: 33
530 first undecided set: 33
521 heads: 2
531 heads: 2
522 roots: 1
532 roots: 1
523 common: 3
533 common: 3
524 missing: 30
534 missing: 30
525 common heads: 2dc09a01254d
535 common heads: 2dc09a01254d
526
536
527 % -- a -> b set (tip only)
537 % -- a -> b set (tip only)
528 comparing with b
538 comparing with b
529 query 1; heads
539 query 1; heads
530 searching for changes
540 searching for changes
531 taking quick initial sample
541 taking quick initial sample
532 searching: 2 queries
542 searching: 2 queries
533 query 2; still undecided: 31, sample size is: 31
543 query 2; still undecided: 31, sample size is: 31
534 2 total queries in *.????s (glob)
544 2 total queries in *.????s (glob)
535 elapsed time: * seconds (glob)
545 elapsed time: * seconds (glob)
536 round-trips: 2
546 round-trips: 2
537 queries: 32
547 queries: 32
538 heads summary:
548 heads summary:
539 total common heads: 1
549 total common heads: 1
540 also local heads: 0
550 also local heads: 0
541 also remote heads: 0
551 also remote heads: 0
542 both: 0
552 both: 0
543 local heads: 2
553 local heads: 2
544 common: 0
554 common: 0
545 missing: 2
555 missing: 2
546 remote heads: 1
556 remote heads: 1
547 common: 0
557 common: 0
548 unknown: 1
558 unknown: 1
549 local changesets: 34
559 local changesets: 34
550 common: 2
560 common: 2
551 heads: 1
561 heads: 1
552 roots: 1
562 roots: 1
553 missing: 32
563 missing: 32
554 heads: 2
564 heads: 2
555 roots: 2
565 roots: 2
556 first undecided set: 34
566 first undecided set: 34
557 heads: 2
567 heads: 2
558 roots: 1
568 roots: 1
559 common: 2
569 common: 2
560 missing: 32
570 missing: 32
561 common heads: 66f7d451a68b
571 common heads: 66f7d451a68b
562
572
563 % -- b -> a tree
573 % -- b -> a tree
564 comparing with a
574 comparing with a
565 searching for changes
575 searching for changes
566 unpruned common: 2dc09a01254d 66f7d451a68b
576 unpruned common: 2dc09a01254d 66f7d451a68b
567 elapsed time: * seconds (glob)
577 elapsed time: * seconds (glob)
568 round-trips: 4
578 round-trips: 4
569 queries: 5
579 queries: 5
580 queries-branches: 1
581 queries-between: 4
570 heads summary:
582 heads summary:
571 total common heads: 1
583 total common heads: 1
572 also local heads: 0
584 also local heads: 0
573 also remote heads: 1
585 also remote heads: 1
574 both: 0
586 both: 0
575 local heads: 1
587 local heads: 1
576 common: 0
588 common: 0
577 missing: 1
589 missing: 1
578 remote heads: 2
590 remote heads: 2
579 common: 1
591 common: 1
580 unknown: 1
592 unknown: 1
581 local changesets: 34
593 local changesets: 34
582 common: 4
594 common: 4
583 heads: 1
595 heads: 1
584 roots: 1
596 roots: 1
585 missing: 30
597 missing: 30
586 heads: 1
598 heads: 1
587 roots: 1
599 roots: 1
588 first undecided set: 30
600 first undecided set: 30
589 heads: 1
601 heads: 1
590 roots: 1
602 roots: 1
591 common: 0
603 common: 0
592 missing: 30
604 missing: 30
593 common heads: 2dc09a01254d
605 common heads: 2dc09a01254d
594
606
595 % -- b -> a set
607 % -- b -> a set
596 comparing with a
608 comparing with a
597 query 1; heads
609 query 1; heads
598 searching for changes
610 searching for changes
599 taking initial sample
611 taking initial sample
600 searching: 2 queries
612 searching: 2 queries
601 query 2; still undecided: 29, sample size is: 29
613 query 2; still undecided: 29, sample size is: 29
602 2 total queries in *.????s (glob)
614 2 total queries in *.????s (glob)
603 elapsed time: * seconds (glob)
615 elapsed time: * seconds (glob)
604 round-trips: 2
616 round-trips: 2
605 queries: 30
617 queries: 30
606 heads summary:
618 heads summary:
607 total common heads: 1
619 total common heads: 1
608 also local heads: 0
620 also local heads: 0
609 also remote heads: 1
621 also remote heads: 1
610 both: 0
622 both: 0
611 local heads: 1
623 local heads: 1
612 common: 0
624 common: 0
613 missing: 1
625 missing: 1
614 remote heads: 2
626 remote heads: 2
615 common: 1
627 common: 1
616 unknown: 1
628 unknown: 1
617 local changesets: 34
629 local changesets: 34
618 common: 4
630 common: 4
619 heads: 1
631 heads: 1
620 roots: 1
632 roots: 1
621 missing: 30
633 missing: 30
622 heads: 1
634 heads: 1
623 roots: 1
635 roots: 1
624 first undecided set: 30
636 first undecided set: 30
625 heads: 1
637 heads: 1
626 roots: 1
638 roots: 1
627 common: 0
639 common: 0
628 missing: 30
640 missing: 30
629 common heads: 2dc09a01254d
641 common heads: 2dc09a01254d
630
642
631 % -- b -> a set (tip only)
643 % -- b -> a set (tip only)
632 comparing with a
644 comparing with a
633 query 1; heads
645 query 1; heads
634 searching for changes
646 searching for changes
635 taking initial sample
647 taking initial sample
636 searching: 2 queries
648 searching: 2 queries
637 query 2; still undecided: 29, sample size is: 29
649 query 2; still undecided: 29, sample size is: 29
638 2 total queries in *.????s (glob)
650 2 total queries in *.????s (glob)
639 elapsed time: * seconds (glob)
651 elapsed time: * seconds (glob)
640 round-trips: 2
652 round-trips: 2
641 queries: 30
653 queries: 30
642 heads summary:
654 heads summary:
643 total common heads: 1
655 total common heads: 1
644 also local heads: 0
656 also local heads: 0
645 also remote heads: 1
657 also remote heads: 1
646 both: 0
658 both: 0
647 local heads: 1
659 local heads: 1
648 common: 0
660 common: 0
649 missing: 1
661 missing: 1
650 remote heads: 2
662 remote heads: 2
651 common: 1
663 common: 1
652 unknown: 1
664 unknown: 1
653 local changesets: 34
665 local changesets: 34
654 common: 4
666 common: 4
655 heads: 1
667 heads: 1
656 roots: 1
668 roots: 1
657 missing: 30
669 missing: 30
658 heads: 1
670 heads: 1
659 roots: 1
671 roots: 1
660 first undecided set: 30
672 first undecided set: 30
661 heads: 1
673 heads: 1
662 roots: 1
674 roots: 1
663 common: 0
675 common: 0
664 missing: 30
676 missing: 30
665 common heads: 2dc09a01254d
677 common heads: 2dc09a01254d
666
678
667
679
668 Both many new:
680 Both many new:
669
681
670 $ testdesc '-ra' '-rb' '
682 $ testdesc '-ra' '-rb' '
671 > +2:f +30 :b
683 > +2:f +30 :b
672 > <f +30 :a'
684 > <f +30 :a'
673
685
674 % -- a -> b tree
686 % -- a -> b tree
675 comparing with b
687 comparing with b
676 searching for changes
688 searching for changes
677 unpruned common: 66f7d451a68b
689 unpruned common: 66f7d451a68b
678 elapsed time: * seconds (glob)
690 elapsed time: * seconds (glob)
679 round-trips: 4
691 round-trips: 4
680 queries: 5
692 queries: 5
693 queries-branches: 1
694 queries-between: 4
681 heads summary:
695 heads summary:
682 total common heads: 1
696 total common heads: 1
683 also local heads: 0
697 also local heads: 0
684 also remote heads: 0
698 also remote heads: 0
685 both: 0
699 both: 0
686 local heads: 1
700 local heads: 1
687 common: 0
701 common: 0
688 missing: 1
702 missing: 1
689 remote heads: 1
703 remote heads: 1
690 common: 0
704 common: 0
691 unknown: 1
705 unknown: 1
692 local changesets: 32
706 local changesets: 32
693 common: 2
707 common: 2
694 heads: 1
708 heads: 1
695 roots: 1
709 roots: 1
696 missing: 30
710 missing: 30
697 heads: 1
711 heads: 1
698 roots: 1
712 roots: 1
699 first undecided set: 32
713 first undecided set: 32
700 heads: 1
714 heads: 1
701 roots: 1
715 roots: 1
702 common: 2
716 common: 2
703 missing: 30
717 missing: 30
704 common heads: 66f7d451a68b
718 common heads: 66f7d451a68b
705
719
706 % -- a -> b set
720 % -- a -> b set
707 comparing with b
721 comparing with b
708 query 1; heads
722 query 1; heads
709 searching for changes
723 searching for changes
710 taking quick initial sample
724 taking quick initial sample
711 searching: 2 queries
725 searching: 2 queries
712 query 2; still undecided: 31, sample size is: 31
726 query 2; still undecided: 31, sample size is: 31
713 2 total queries in *.????s (glob)
727 2 total queries in *.????s (glob)
714 elapsed time: * seconds (glob)
728 elapsed time: * seconds (glob)
715 round-trips: 2
729 round-trips: 2
716 queries: 32
730 queries: 32
717 heads summary:
731 heads summary:
718 total common heads: 1
732 total common heads: 1
719 also local heads: 0
733 also local heads: 0
720 also remote heads: 0
734 also remote heads: 0
721 both: 0
735 both: 0
722 local heads: 1
736 local heads: 1
723 common: 0
737 common: 0
724 missing: 1
738 missing: 1
725 remote heads: 1
739 remote heads: 1
726 common: 0
740 common: 0
727 unknown: 1
741 unknown: 1
728 local changesets: 32
742 local changesets: 32
729 common: 2
743 common: 2
730 heads: 1
744 heads: 1
731 roots: 1
745 roots: 1
732 missing: 30
746 missing: 30
733 heads: 1
747 heads: 1
734 roots: 1
748 roots: 1
735 first undecided set: 32
749 first undecided set: 32
736 heads: 1
750 heads: 1
737 roots: 1
751 roots: 1
738 common: 2
752 common: 2
739 missing: 30
753 missing: 30
740 common heads: 66f7d451a68b
754 common heads: 66f7d451a68b
741
755
742 % -- a -> b set (tip only)
756 % -- a -> b set (tip only)
743 comparing with b
757 comparing with b
744 query 1; heads
758 query 1; heads
745 searching for changes
759 searching for changes
746 taking quick initial sample
760 taking quick initial sample
747 searching: 2 queries
761 searching: 2 queries
748 query 2; still undecided: 31, sample size is: 31
762 query 2; still undecided: 31, sample size is: 31
749 2 total queries in *.????s (glob)
763 2 total queries in *.????s (glob)
750 elapsed time: * seconds (glob)
764 elapsed time: * seconds (glob)
751 round-trips: 2
765 round-trips: 2
752 queries: 32
766 queries: 32
753 heads summary:
767 heads summary:
754 total common heads: 1
768 total common heads: 1
755 also local heads: 0
769 also local heads: 0
756 also remote heads: 0
770 also remote heads: 0
757 both: 0
771 both: 0
758 local heads: 1
772 local heads: 1
759 common: 0
773 common: 0
760 missing: 1
774 missing: 1
761 remote heads: 1
775 remote heads: 1
762 common: 0
776 common: 0
763 unknown: 1
777 unknown: 1
764 local changesets: 32
778 local changesets: 32
765 common: 2
779 common: 2
766 heads: 1
780 heads: 1
767 roots: 1
781 roots: 1
768 missing: 30
782 missing: 30
769 heads: 1
783 heads: 1
770 roots: 1
784 roots: 1
771 first undecided set: 32
785 first undecided set: 32
772 heads: 1
786 heads: 1
773 roots: 1
787 roots: 1
774 common: 2
788 common: 2
775 missing: 30
789 missing: 30
776 common heads: 66f7d451a68b
790 common heads: 66f7d451a68b
777
791
778 % -- b -> a tree
792 % -- b -> a tree
779 comparing with a
793 comparing with a
780 searching for changes
794 searching for changes
781 unpruned common: 66f7d451a68b
795 unpruned common: 66f7d451a68b
782 elapsed time: * seconds (glob)
796 elapsed time: * seconds (glob)
783 round-trips: 4
797 round-trips: 4
784 queries: 5
798 queries: 5
799 queries-branches: 1
800 queries-between: 4
785 heads summary:
801 heads summary:
786 total common heads: 1
802 total common heads: 1
787 also local heads: 0
803 also local heads: 0
788 also remote heads: 0
804 also remote heads: 0
789 both: 0
805 both: 0
790 local heads: 1
806 local heads: 1
791 common: 0
807 common: 0
792 missing: 1
808 missing: 1
793 remote heads: 1
809 remote heads: 1
794 common: 0
810 common: 0
795 unknown: 1
811 unknown: 1
796 local changesets: 32
812 local changesets: 32
797 common: 2
813 common: 2
798 heads: 1
814 heads: 1
799 roots: 1
815 roots: 1
800 missing: 30
816 missing: 30
801 heads: 1
817 heads: 1
802 roots: 1
818 roots: 1
803 first undecided set: 32
819 first undecided set: 32
804 heads: 1
820 heads: 1
805 roots: 1
821 roots: 1
806 common: 2
822 common: 2
807 missing: 30
823 missing: 30
808 common heads: 66f7d451a68b
824 common heads: 66f7d451a68b
809
825
810 % -- b -> a set
826 % -- b -> a set
811 comparing with a
827 comparing with a
812 query 1; heads
828 query 1; heads
813 searching for changes
829 searching for changes
814 taking quick initial sample
830 taking quick initial sample
815 searching: 2 queries
831 searching: 2 queries
816 query 2; still undecided: 31, sample size is: 31
832 query 2; still undecided: 31, sample size is: 31
817 2 total queries in *.????s (glob)
833 2 total queries in *.????s (glob)
818 elapsed time: * seconds (glob)
834 elapsed time: * seconds (glob)
819 round-trips: 2
835 round-trips: 2
820 queries: 32
836 queries: 32
821 heads summary:
837 heads summary:
822 total common heads: 1
838 total common heads: 1
823 also local heads: 0
839 also local heads: 0
824 also remote heads: 0
840 also remote heads: 0
825 both: 0
841 both: 0
826 local heads: 1
842 local heads: 1
827 common: 0
843 common: 0
828 missing: 1
844 missing: 1
829 remote heads: 1
845 remote heads: 1
830 common: 0
846 common: 0
831 unknown: 1
847 unknown: 1
832 local changesets: 32
848 local changesets: 32
833 common: 2
849 common: 2
834 heads: 1
850 heads: 1
835 roots: 1
851 roots: 1
836 missing: 30
852 missing: 30
837 heads: 1
853 heads: 1
838 roots: 1
854 roots: 1
839 first undecided set: 32
855 first undecided set: 32
840 heads: 1
856 heads: 1
841 roots: 1
857 roots: 1
842 common: 2
858 common: 2
843 missing: 30
859 missing: 30
844 common heads: 66f7d451a68b
860 common heads: 66f7d451a68b
845
861
846 % -- b -> a set (tip only)
862 % -- b -> a set (tip only)
847 comparing with a
863 comparing with a
848 query 1; heads
864 query 1; heads
849 searching for changes
865 searching for changes
850 taking quick initial sample
866 taking quick initial sample
851 searching: 2 queries
867 searching: 2 queries
852 query 2; still undecided: 31, sample size is: 31
868 query 2; still undecided: 31, sample size is: 31
853 2 total queries in *.????s (glob)
869 2 total queries in *.????s (glob)
854 elapsed time: * seconds (glob)
870 elapsed time: * seconds (glob)
855 round-trips: 2
871 round-trips: 2
856 queries: 32
872 queries: 32
857 heads summary:
873 heads summary:
858 total common heads: 1
874 total common heads: 1
859 also local heads: 0
875 also local heads: 0
860 also remote heads: 0
876 also remote heads: 0
861 both: 0
877 both: 0
862 local heads: 1
878 local heads: 1
863 common: 0
879 common: 0
864 missing: 1
880 missing: 1
865 remote heads: 1
881 remote heads: 1
866 common: 0
882 common: 0
867 unknown: 1
883 unknown: 1
868 local changesets: 32
884 local changesets: 32
869 common: 2
885 common: 2
870 heads: 1
886 heads: 1
871 roots: 1
887 roots: 1
872 missing: 30
888 missing: 30
873 heads: 1
889 heads: 1
874 roots: 1
890 roots: 1
875 first undecided set: 32
891 first undecided set: 32
876 heads: 1
892 heads: 1
877 roots: 1
893 roots: 1
878 common: 2
894 common: 2
879 missing: 30
895 missing: 30
880 common heads: 66f7d451a68b
896 common heads: 66f7d451a68b
881
897
882
898
883 Both many new skewed:
899 Both many new skewed:
884
900
885 $ testdesc '-ra' '-rb' '
901 $ testdesc '-ra' '-rb' '
886 > +2:f +30 :b
902 > +2:f +30 :b
887 > <f +50 :a'
903 > <f +50 :a'
888
904
889 % -- a -> b tree
905 % -- a -> b tree
890 comparing with b
906 comparing with b
891 searching for changes
907 searching for changes
892 unpruned common: 66f7d451a68b
908 unpruned common: 66f7d451a68b
893 elapsed time: * seconds (glob)
909 elapsed time: * seconds (glob)
894 round-trips: 4
910 round-trips: 4
895 queries: 5
911 queries: 5
912 queries-branches: 1
913 queries-between: 4
896 heads summary:
914 heads summary:
897 total common heads: 1
915 total common heads: 1
898 also local heads: 0
916 also local heads: 0
899 also remote heads: 0
917 also remote heads: 0
900 both: 0
918 both: 0
901 local heads: 1
919 local heads: 1
902 common: 0
920 common: 0
903 missing: 1
921 missing: 1
904 remote heads: 1
922 remote heads: 1
905 common: 0
923 common: 0
906 unknown: 1
924 unknown: 1
907 local changesets: 52
925 local changesets: 52
908 common: 2
926 common: 2
909 heads: 1
927 heads: 1
910 roots: 1
928 roots: 1
911 missing: 50
929 missing: 50
912 heads: 1
930 heads: 1
913 roots: 1
931 roots: 1
914 first undecided set: 52
932 first undecided set: 52
915 heads: 1
933 heads: 1
916 roots: 1
934 roots: 1
917 common: 2
935 common: 2
918 missing: 50
936 missing: 50
919 common heads: 66f7d451a68b
937 common heads: 66f7d451a68b
920
938
921 % -- a -> b set
939 % -- a -> b set
922 comparing with b
940 comparing with b
923 query 1; heads
941 query 1; heads
924 searching for changes
942 searching for changes
925 taking quick initial sample
943 taking quick initial sample
926 searching: 2 queries
944 searching: 2 queries
927 query 2; still undecided: 51, sample size is: 51
945 query 2; still undecided: 51, sample size is: 51
928 2 total queries in *.????s (glob)
946 2 total queries in *.????s (glob)
929 elapsed time: * seconds (glob)
947 elapsed time: * seconds (glob)
930 round-trips: 2
948 round-trips: 2
931 queries: 52
949 queries: 52
932 heads summary:
950 heads summary:
933 total common heads: 1
951 total common heads: 1
934 also local heads: 0
952 also local heads: 0
935 also remote heads: 0
953 also remote heads: 0
936 both: 0
954 both: 0
937 local heads: 1
955 local heads: 1
938 common: 0
956 common: 0
939 missing: 1
957 missing: 1
940 remote heads: 1
958 remote heads: 1
941 common: 0
959 common: 0
942 unknown: 1
960 unknown: 1
943 local changesets: 52
961 local changesets: 52
944 common: 2
962 common: 2
945 heads: 1
963 heads: 1
946 roots: 1
964 roots: 1
947 missing: 50
965 missing: 50
948 heads: 1
966 heads: 1
949 roots: 1
967 roots: 1
950 first undecided set: 52
968 first undecided set: 52
951 heads: 1
969 heads: 1
952 roots: 1
970 roots: 1
953 common: 2
971 common: 2
954 missing: 50
972 missing: 50
955 common heads: 66f7d451a68b
973 common heads: 66f7d451a68b
956
974
957 % -- a -> b set (tip only)
975 % -- a -> b set (tip only)
958 comparing with b
976 comparing with b
959 query 1; heads
977 query 1; heads
960 searching for changes
978 searching for changes
961 taking quick initial sample
979 taking quick initial sample
962 searching: 2 queries
980 searching: 2 queries
963 query 2; still undecided: 51, sample size is: 51
981 query 2; still undecided: 51, sample size is: 51
964 2 total queries in *.????s (glob)
982 2 total queries in *.????s (glob)
965 elapsed time: * seconds (glob)
983 elapsed time: * seconds (glob)
966 round-trips: 2
984 round-trips: 2
967 queries: 52
985 queries: 52
968 heads summary:
986 heads summary:
969 total common heads: 1
987 total common heads: 1
970 also local heads: 0
988 also local heads: 0
971 also remote heads: 0
989 also remote heads: 0
972 both: 0
990 both: 0
973 local heads: 1
991 local heads: 1
974 common: 0
992 common: 0
975 missing: 1
993 missing: 1
976 remote heads: 1
994 remote heads: 1
977 common: 0
995 common: 0
978 unknown: 1
996 unknown: 1
979 local changesets: 52
997 local changesets: 52
980 common: 2
998 common: 2
981 heads: 1
999 heads: 1
982 roots: 1
1000 roots: 1
983 missing: 50
1001 missing: 50
984 heads: 1
1002 heads: 1
985 roots: 1
1003 roots: 1
986 first undecided set: 52
1004 first undecided set: 52
987 heads: 1
1005 heads: 1
988 roots: 1
1006 roots: 1
989 common: 2
1007 common: 2
990 missing: 50
1008 missing: 50
991 common heads: 66f7d451a68b
1009 common heads: 66f7d451a68b
992
1010
993 % -- b -> a tree
1011 % -- b -> a tree
994 comparing with a
1012 comparing with a
995 searching for changes
1013 searching for changes
996 unpruned common: 66f7d451a68b
1014 unpruned common: 66f7d451a68b
997 elapsed time: * seconds (glob)
1015 elapsed time: * seconds (glob)
998 round-trips: 3
1016 round-trips: 3
999 queries: 4
1017 queries: 4
1018 queries-branches: 1
1019 queries-between: 3
1000 heads summary:
1020 heads summary:
1001 total common heads: 1
1021 total common heads: 1
1002 also local heads: 0
1022 also local heads: 0
1003 also remote heads: 0
1023 also remote heads: 0
1004 both: 0
1024 both: 0
1005 local heads: 1
1025 local heads: 1
1006 common: 0
1026 common: 0
1007 missing: 1
1027 missing: 1
1008 remote heads: 1
1028 remote heads: 1
1009 common: 0
1029 common: 0
1010 unknown: 1
1030 unknown: 1
1011 local changesets: 32
1031 local changesets: 32
1012 common: 2
1032 common: 2
1013 heads: 1
1033 heads: 1
1014 roots: 1
1034 roots: 1
1015 missing: 30
1035 missing: 30
1016 heads: 1
1036 heads: 1
1017 roots: 1
1037 roots: 1
1018 first undecided set: 32
1038 first undecided set: 32
1019 heads: 1
1039 heads: 1
1020 roots: 1
1040 roots: 1
1021 common: 2
1041 common: 2
1022 missing: 30
1042 missing: 30
1023 common heads: 66f7d451a68b
1043 common heads: 66f7d451a68b
1024
1044
1025 % -- b -> a set
1045 % -- b -> a set
1026 comparing with a
1046 comparing with a
1027 query 1; heads
1047 query 1; heads
1028 searching for changes
1048 searching for changes
1029 taking quick initial sample
1049 taking quick initial sample
1030 searching: 2 queries
1050 searching: 2 queries
1031 query 2; still undecided: 31, sample size is: 31
1051 query 2; still undecided: 31, sample size is: 31
1032 2 total queries in *.????s (glob)
1052 2 total queries in *.????s (glob)
1033 elapsed time: * seconds (glob)
1053 elapsed time: * seconds (glob)
1034 round-trips: 2
1054 round-trips: 2
1035 queries: 32
1055 queries: 32
1036 heads summary:
1056 heads summary:
1037 total common heads: 1
1057 total common heads: 1
1038 also local heads: 0
1058 also local heads: 0
1039 also remote heads: 0
1059 also remote heads: 0
1040 both: 0
1060 both: 0
1041 local heads: 1
1061 local heads: 1
1042 common: 0
1062 common: 0
1043 missing: 1
1063 missing: 1
1044 remote heads: 1
1064 remote heads: 1
1045 common: 0
1065 common: 0
1046 unknown: 1
1066 unknown: 1
1047 local changesets: 32
1067 local changesets: 32
1048 common: 2
1068 common: 2
1049 heads: 1
1069 heads: 1
1050 roots: 1
1070 roots: 1
1051 missing: 30
1071 missing: 30
1052 heads: 1
1072 heads: 1
1053 roots: 1
1073 roots: 1
1054 first undecided set: 32
1074 first undecided set: 32
1055 heads: 1
1075 heads: 1
1056 roots: 1
1076 roots: 1
1057 common: 2
1077 common: 2
1058 missing: 30
1078 missing: 30
1059 common heads: 66f7d451a68b
1079 common heads: 66f7d451a68b
1060
1080
1061 % -- b -> a set (tip only)
1081 % -- b -> a set (tip only)
1062 comparing with a
1082 comparing with a
1063 query 1; heads
1083 query 1; heads
1064 searching for changes
1084 searching for changes
1065 taking quick initial sample
1085 taking quick initial sample
1066 searching: 2 queries
1086 searching: 2 queries
1067 query 2; still undecided: 31, sample size is: 31
1087 query 2; still undecided: 31, sample size is: 31
1068 2 total queries in *.????s (glob)
1088 2 total queries in *.????s (glob)
1069 elapsed time: * seconds (glob)
1089 elapsed time: * seconds (glob)
1070 round-trips: 2
1090 round-trips: 2
1071 queries: 32
1091 queries: 32
1072 heads summary:
1092 heads summary:
1073 total common heads: 1
1093 total common heads: 1
1074 also local heads: 0
1094 also local heads: 0
1075 also remote heads: 0
1095 also remote heads: 0
1076 both: 0
1096 both: 0
1077 local heads: 1
1097 local heads: 1
1078 common: 0
1098 common: 0
1079 missing: 1
1099 missing: 1
1080 remote heads: 1
1100 remote heads: 1
1081 common: 0
1101 common: 0
1082 unknown: 1
1102 unknown: 1
1083 local changesets: 32
1103 local changesets: 32
1084 common: 2
1104 common: 2
1085 heads: 1
1105 heads: 1
1086 roots: 1
1106 roots: 1
1087 missing: 30
1107 missing: 30
1088 heads: 1
1108 heads: 1
1089 roots: 1
1109 roots: 1
1090 first undecided set: 32
1110 first undecided set: 32
1091 heads: 1
1111 heads: 1
1092 roots: 1
1112 roots: 1
1093 common: 2
1113 common: 2
1094 missing: 30
1114 missing: 30
1095 common heads: 66f7d451a68b
1115 common heads: 66f7d451a68b
1096
1116
1097
1117
1098 Both many new on top of long history:
1118 Both many new on top of long history:
1099
1119
1100 $ testdesc '-ra' '-rb' '
1120 $ testdesc '-ra' '-rb' '
1101 > +1000:f +30 :b
1121 > +1000:f +30 :b
1102 > <f +50 :a'
1122 > <f +50 :a'
1103
1123
1104 % -- a -> b tree
1124 % -- a -> b tree
1105 comparing with b
1125 comparing with b
1106 searching for changes
1126 searching for changes
1107 unpruned common: 7ead0cba2838
1127 unpruned common: 7ead0cba2838
1108 elapsed time: * seconds (glob)
1128 elapsed time: * seconds (glob)
1109 round-trips: 4
1129 round-trips: 4
1110 queries: 5
1130 queries: 5
1131 queries-branches: 1
1132 queries-between: 4
1111 heads summary:
1133 heads summary:
1112 total common heads: 1
1134 total common heads: 1
1113 also local heads: 0
1135 also local heads: 0
1114 also remote heads: 0
1136 also remote heads: 0
1115 both: 0
1137 both: 0
1116 local heads: 1
1138 local heads: 1
1117 common: 0
1139 common: 0
1118 missing: 1
1140 missing: 1
1119 remote heads: 1
1141 remote heads: 1
1120 common: 0
1142 common: 0
1121 unknown: 1
1143 unknown: 1
1122 local changesets: 1050
1144 local changesets: 1050
1123 common: 1000
1145 common: 1000
1124 heads: 1
1146 heads: 1
1125 roots: 1
1147 roots: 1
1126 missing: 50
1148 missing: 50
1127 heads: 1
1149 heads: 1
1128 roots: 1
1150 roots: 1
1129 first undecided set: 1050
1151 first undecided set: 1050
1130 heads: 1
1152 heads: 1
1131 roots: 1
1153 roots: 1
1132 common: 1000
1154 common: 1000
1133 missing: 50
1155 missing: 50
1134 common heads: 7ead0cba2838
1156 common heads: 7ead0cba2838
1135
1157
1136 % -- a -> b set
1158 % -- a -> b set
1137 comparing with b
1159 comparing with b
1138 query 1; heads
1160 query 1; heads
1139 searching for changes
1161 searching for changes
1140 taking quick initial sample
1162 taking quick initial sample
1141 searching: 2 queries
1163 searching: 2 queries
1142 query 2; still undecided: 1049, sample size is: 11
1164 query 2; still undecided: 1049, sample size is: 11
1143 sampling from both directions
1165 sampling from both directions
1144 searching: 3 queries
1166 searching: 3 queries
1145 query 3; still undecided: 31, sample size is: 31
1167 query 3; still undecided: 31, sample size is: 31
1146 3 total queries in *.????s (glob)
1168 3 total queries in *.????s (glob)
1147 elapsed time: * seconds (glob)
1169 elapsed time: * seconds (glob)
1148 round-trips: 3
1170 round-trips: 3
1149 queries: 43
1171 queries: 43
1150 heads summary:
1172 heads summary:
1151 total common heads: 1
1173 total common heads: 1
1152 also local heads: 0
1174 also local heads: 0
1153 also remote heads: 0
1175 also remote heads: 0
1154 both: 0
1176 both: 0
1155 local heads: 1
1177 local heads: 1
1156 common: 0
1178 common: 0
1157 missing: 1
1179 missing: 1
1158 remote heads: 1
1180 remote heads: 1
1159 common: 0
1181 common: 0
1160 unknown: 1
1182 unknown: 1
1161 local changesets: 1050
1183 local changesets: 1050
1162 common: 1000
1184 common: 1000
1163 heads: 1
1185 heads: 1
1164 roots: 1
1186 roots: 1
1165 missing: 50
1187 missing: 50
1166 heads: 1
1188 heads: 1
1167 roots: 1
1189 roots: 1
1168 first undecided set: 1050
1190 first undecided set: 1050
1169 heads: 1
1191 heads: 1
1170 roots: 1
1192 roots: 1
1171 common: 1000
1193 common: 1000
1172 missing: 50
1194 missing: 50
1173 common heads: 7ead0cba2838
1195 common heads: 7ead0cba2838
1174
1196
1175 % -- a -> b set (tip only)
1197 % -- a -> b set (tip only)
1176 comparing with b
1198 comparing with b
1177 query 1; heads
1199 query 1; heads
1178 searching for changes
1200 searching for changes
1179 taking quick initial sample
1201 taking quick initial sample
1180 searching: 2 queries
1202 searching: 2 queries
1181 query 2; still undecided: 1049, sample size is: 11
1203 query 2; still undecided: 1049, sample size is: 11
1182 sampling from both directions
1204 sampling from both directions
1183 searching: 3 queries
1205 searching: 3 queries
1184 query 3; still undecided: 31, sample size is: 31
1206 query 3; still undecided: 31, sample size is: 31
1185 3 total queries in *.????s (glob)
1207 3 total queries in *.????s (glob)
1186 elapsed time: * seconds (glob)
1208 elapsed time: * seconds (glob)
1187 round-trips: 3
1209 round-trips: 3
1188 queries: 43
1210 queries: 43
1189 heads summary:
1211 heads summary:
1190 total common heads: 1
1212 total common heads: 1
1191 also local heads: 0
1213 also local heads: 0
1192 also remote heads: 0
1214 also remote heads: 0
1193 both: 0
1215 both: 0
1194 local heads: 1
1216 local heads: 1
1195 common: 0
1217 common: 0
1196 missing: 1
1218 missing: 1
1197 remote heads: 1
1219 remote heads: 1
1198 common: 0
1220 common: 0
1199 unknown: 1
1221 unknown: 1
1200 local changesets: 1050
1222 local changesets: 1050
1201 common: 1000
1223 common: 1000
1202 heads: 1
1224 heads: 1
1203 roots: 1
1225 roots: 1
1204 missing: 50
1226 missing: 50
1205 heads: 1
1227 heads: 1
1206 roots: 1
1228 roots: 1
1207 first undecided set: 1050
1229 first undecided set: 1050
1208 heads: 1
1230 heads: 1
1209 roots: 1
1231 roots: 1
1210 common: 1000
1232 common: 1000
1211 missing: 50
1233 missing: 50
1212 common heads: 7ead0cba2838
1234 common heads: 7ead0cba2838
1213
1235
1214 % -- b -> a tree
1236 % -- b -> a tree
1215 comparing with a
1237 comparing with a
1216 searching for changes
1238 searching for changes
1217 unpruned common: 7ead0cba2838
1239 unpruned common: 7ead0cba2838
1218 elapsed time: * seconds (glob)
1240 elapsed time: * seconds (glob)
1219 round-trips: 3
1241 round-trips: 3
1220 queries: 4
1242 queries: 4
1243 queries-branches: 1
1244 queries-between: 3
1221 heads summary:
1245 heads summary:
1222 total common heads: 1
1246 total common heads: 1
1223 also local heads: 0
1247 also local heads: 0
1224 also remote heads: 0
1248 also remote heads: 0
1225 both: 0
1249 both: 0
1226 local heads: 1
1250 local heads: 1
1227 common: 0
1251 common: 0
1228 missing: 1
1252 missing: 1
1229 remote heads: 1
1253 remote heads: 1
1230 common: 0
1254 common: 0
1231 unknown: 1
1255 unknown: 1
1232 local changesets: 1030
1256 local changesets: 1030
1233 common: 1000
1257 common: 1000
1234 heads: 1
1258 heads: 1
1235 roots: 1
1259 roots: 1
1236 missing: 30
1260 missing: 30
1237 heads: 1
1261 heads: 1
1238 roots: 1
1262 roots: 1
1239 first undecided set: 1030
1263 first undecided set: 1030
1240 heads: 1
1264 heads: 1
1241 roots: 1
1265 roots: 1
1242 common: 1000
1266 common: 1000
1243 missing: 30
1267 missing: 30
1244 common heads: 7ead0cba2838
1268 common heads: 7ead0cba2838
1245
1269
1246 % -- b -> a set
1270 % -- b -> a set
1247 comparing with a
1271 comparing with a
1248 query 1; heads
1272 query 1; heads
1249 searching for changes
1273 searching for changes
1250 taking quick initial sample
1274 taking quick initial sample
1251 searching: 2 queries
1275 searching: 2 queries
1252 query 2; still undecided: 1029, sample size is: 11
1276 query 2; still undecided: 1029, sample size is: 11
1253 sampling from both directions
1277 sampling from both directions
1254 searching: 3 queries
1278 searching: 3 queries
1255 query 3; still undecided: 15, sample size is: 15
1279 query 3; still undecided: 15, sample size is: 15
1256 3 total queries in *.????s (glob)
1280 3 total queries in *.????s (glob)
1257 elapsed time: * seconds (glob)
1281 elapsed time: * seconds (glob)
1258 round-trips: 3
1282 round-trips: 3
1259 queries: 27
1283 queries: 27
1260 heads summary:
1284 heads summary:
1261 total common heads: 1
1285 total common heads: 1
1262 also local heads: 0
1286 also local heads: 0
1263 also remote heads: 0
1287 also remote heads: 0
1264 both: 0
1288 both: 0
1265 local heads: 1
1289 local heads: 1
1266 common: 0
1290 common: 0
1267 missing: 1
1291 missing: 1
1268 remote heads: 1
1292 remote heads: 1
1269 common: 0
1293 common: 0
1270 unknown: 1
1294 unknown: 1
1271 local changesets: 1030
1295 local changesets: 1030
1272 common: 1000
1296 common: 1000
1273 heads: 1
1297 heads: 1
1274 roots: 1
1298 roots: 1
1275 missing: 30
1299 missing: 30
1276 heads: 1
1300 heads: 1
1277 roots: 1
1301 roots: 1
1278 first undecided set: 1030
1302 first undecided set: 1030
1279 heads: 1
1303 heads: 1
1280 roots: 1
1304 roots: 1
1281 common: 1000
1305 common: 1000
1282 missing: 30
1306 missing: 30
1283 common heads: 7ead0cba2838
1307 common heads: 7ead0cba2838
1284
1308
1285 % -- b -> a set (tip only)
1309 % -- b -> a set (tip only)
1286 comparing with a
1310 comparing with a
1287 query 1; heads
1311 query 1; heads
1288 searching for changes
1312 searching for changes
1289 taking quick initial sample
1313 taking quick initial sample
1290 searching: 2 queries
1314 searching: 2 queries
1291 query 2; still undecided: 1029, sample size is: 11
1315 query 2; still undecided: 1029, sample size is: 11
1292 sampling from both directions
1316 sampling from both directions
1293 searching: 3 queries
1317 searching: 3 queries
1294 query 3; still undecided: 15, sample size is: 15
1318 query 3; still undecided: 15, sample size is: 15
1295 3 total queries in *.????s (glob)
1319 3 total queries in *.????s (glob)
1296 elapsed time: * seconds (glob)
1320 elapsed time: * seconds (glob)
1297 round-trips: 3
1321 round-trips: 3
1298 queries: 27
1322 queries: 27
1299 heads summary:
1323 heads summary:
1300 total common heads: 1
1324 total common heads: 1
1301 also local heads: 0
1325 also local heads: 0
1302 also remote heads: 0
1326 also remote heads: 0
1303 both: 0
1327 both: 0
1304 local heads: 1
1328 local heads: 1
1305 common: 0
1329 common: 0
1306 missing: 1
1330 missing: 1
1307 remote heads: 1
1331 remote heads: 1
1308 common: 0
1332 common: 0
1309 unknown: 1
1333 unknown: 1
1310 local changesets: 1030
1334 local changesets: 1030
1311 common: 1000
1335 common: 1000
1312 heads: 1
1336 heads: 1
1313 roots: 1
1337 roots: 1
1314 missing: 30
1338 missing: 30
1315 heads: 1
1339 heads: 1
1316 roots: 1
1340 roots: 1
1317 first undecided set: 1030
1341 first undecided set: 1030
1318 heads: 1
1342 heads: 1
1319 roots: 1
1343 roots: 1
1320 common: 1000
1344 common: 1000
1321 missing: 30
1345 missing: 30
1322 common heads: 7ead0cba2838
1346 common heads: 7ead0cba2838
1323
1347
1324
1348
1325 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1349 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1326
1350
1327 $ hg init manyheads
1351 $ hg init manyheads
1328 $ cd manyheads
1352 $ cd manyheads
1329 $ echo "+300:r @a" >dagdesc
1353 $ echo "+300:r @a" >dagdesc
1330 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1354 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1331 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1355 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1332 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1356 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1333 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1357 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1334 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1358 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1335 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1359 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1336 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1360 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1337 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1361 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1338 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1362 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1339 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1363 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1340 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1364 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1341 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1365 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1342 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1366 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1343 $ echo "@b *r+3" >>dagdesc # one more head
1367 $ echo "@b *r+3" >>dagdesc # one more head
1344 $ hg debugbuilddag <dagdesc
1368 $ hg debugbuilddag <dagdesc
1345 reading DAG from stdin
1369 reading DAG from stdin
1346
1370
1347 $ hg heads -t --template . | wc -c
1371 $ hg heads -t --template . | wc -c
1348 \s*261 (re)
1372 \s*261 (re)
1349
1373
1350 $ hg clone -b a . a
1374 $ hg clone -b a . a
1351 adding changesets
1375 adding changesets
1352 adding manifests
1376 adding manifests
1353 adding file changes
1377 adding file changes
1354 added 1340 changesets with 0 changes to 0 files (+259 heads)
1378 added 1340 changesets with 0 changes to 0 files (+259 heads)
1355 new changesets 1ea73414a91b:1c51e2c80832
1379 new changesets 1ea73414a91b:1c51e2c80832
1356 updating to branch a
1380 updating to branch a
1357 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1381 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1358 $ hg clone -b b . b
1382 $ hg clone -b b . b
1359 adding changesets
1383 adding changesets
1360 adding manifests
1384 adding manifests
1361 adding file changes
1385 adding file changes
1362 added 304 changesets with 0 changes to 0 files
1386 added 304 changesets with 0 changes to 0 files
1363 new changesets 1ea73414a91b:513314ca8b3a
1387 new changesets 1ea73414a91b:513314ca8b3a
1364 updating to branch b
1388 updating to branch b
1365 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1389 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1366
1390
1367 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1391 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1368 comparing with b
1392 comparing with b
1369 query 1; heads
1393 query 1; heads
1370 searching for changes
1394 searching for changes
1371 taking quick initial sample
1395 taking quick initial sample
1372 searching: 2 queries
1396 searching: 2 queries
1373 query 2; still undecided: 1080, sample size is: 50
1397 query 2; still undecided: 1080, sample size is: 50
1374 sampling from both directions
1398 sampling from both directions
1375 searching: 3 queries
1399 searching: 3 queries
1376 query 3; still undecided: 1030, sample size is: 200
1400 query 3; still undecided: 1030, sample size is: 200
1377 sampling from both directions
1401 sampling from both directions
1378 searching: 4 queries
1402 searching: 4 queries
1379 query 4; still undecided: 547, sample size is: 210
1403 query 4; still undecided: 547, sample size is: 210
1380 sampling from both directions
1404 sampling from both directions
1381 searching: 5 queries
1405 searching: 5 queries
1382 query 5; still undecided: 336, sample size is: 220
1406 query 5; still undecided: 336, sample size is: 220
1383 sampling from both directions
1407 sampling from both directions
1384 searching: 6 queries
1408 searching: 6 queries
1385 query 6; still undecided: 114, sample size is: 114
1409 query 6; still undecided: 114, sample size is: 114
1386 6 total queries in *.????s (glob)
1410 6 total queries in *.????s (glob)
1387 elapsed time: * seconds (glob)
1411 elapsed time: * seconds (glob)
1388 round-trips: 6
1412 round-trips: 6
1389 queries: 1054
1413 queries: 1054
1390 heads summary:
1414 heads summary:
1391 total common heads: 1
1415 total common heads: 1
1392 also local heads: 0
1416 also local heads: 0
1393 also remote heads: 0
1417 also remote heads: 0
1394 both: 0
1418 both: 0
1395 local heads: 260
1419 local heads: 260
1396 common: 0
1420 common: 0
1397 missing: 260
1421 missing: 260
1398 remote heads: 1
1422 remote heads: 1
1399 common: 0
1423 common: 0
1400 unknown: 1
1424 unknown: 1
1401 local changesets: 1340
1425 local changesets: 1340
1402 common: 300
1426 common: 300
1403 heads: 1
1427 heads: 1
1404 roots: 1
1428 roots: 1
1405 missing: 1040
1429 missing: 1040
1406 heads: 260
1430 heads: 260
1407 roots: 260
1431 roots: 260
1408 first undecided set: 1340
1432 first undecided set: 1340
1409 heads: 260
1433 heads: 260
1410 roots: 1
1434 roots: 1
1411 common: 300
1435 common: 300
1412 missing: 1040
1436 missing: 1040
1413 common heads: 3ee37d65064a
1437 common heads: 3ee37d65064a
1414 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1438 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1415 comparing with b
1439 comparing with b
1416 query 1; heads
1440 query 1; heads
1417 searching for changes
1441 searching for changes
1418 taking quick initial sample
1442 taking quick initial sample
1419 searching: 2 queries
1443 searching: 2 queries
1420 query 2; still undecided: 303, sample size is: 9
1444 query 2; still undecided: 303, sample size is: 9
1421 sampling from both directions
1445 sampling from both directions
1422 searching: 3 queries
1446 searching: 3 queries
1423 query 3; still undecided: 3, sample size is: 3
1447 query 3; still undecided: 3, sample size is: 3
1424 3 total queries in *.????s (glob)
1448 3 total queries in *.????s (glob)
1425 elapsed time: * seconds (glob)
1449 elapsed time: * seconds (glob)
1426 round-trips: 3
1450 round-trips: 3
1427 queries: 13
1451 queries: 13
1428 heads summary:
1452 heads summary:
1429 total common heads: 1
1453 total common heads: 1
1430 also local heads: 0
1454 also local heads: 0
1431 also remote heads: 0
1455 also remote heads: 0
1432 both: 0
1456 both: 0
1433 local heads: 260
1457 local heads: 260
1434 common: 0
1458 common: 0
1435 missing: 260
1459 missing: 260
1436 remote heads: 1
1460 remote heads: 1
1437 common: 0
1461 common: 0
1438 unknown: 1
1462 unknown: 1
1439 local changesets: 1340
1463 local changesets: 1340
1440 common: 300
1464 common: 300
1441 heads: 1
1465 heads: 1
1442 roots: 1
1466 roots: 1
1443 missing: 1040
1467 missing: 1040
1444 heads: 260
1468 heads: 260
1445 roots: 260
1469 roots: 260
1446 first undecided set: 1340
1470 first undecided set: 1340
1447 heads: 260
1471 heads: 260
1448 roots: 1
1472 roots: 1
1449 common: 300
1473 common: 300
1450 missing: 1040
1474 missing: 1040
1451 common heads: 3ee37d65064a
1475 common heads: 3ee37d65064a
1452
1476
1453 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1477 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1454 comparing with b
1478 comparing with b
1455 searching for changes
1479 searching for changes
1456 sampling from both directions
1480 sampling from both directions
1457 query 1; still undecided: 1340, sample size is: 50
1481 query 1; still undecided: 1340, sample size is: 50
1458 sampling from both directions
1482 sampling from both directions
1459 query 2; still undecided: 995, sample size is: 60
1483 query 2; still undecided: 995, sample size is: 60
1460 sampling from both directions
1484 sampling from both directions
1461 query 3; still undecided: 913, sample size is: 72
1485 query 3; still undecided: 913, sample size is: 72
1462 sampling from both directions
1486 sampling from both directions
1463 query 4; still undecided: 816, sample size is: 204
1487 query 4; still undecided: 816, sample size is: 204
1464 sampling from both directions
1488 sampling from both directions
1465 query 5; still undecided: 612, sample size is: 153
1489 query 5; still undecided: 612, sample size is: 153
1466 sampling from both directions
1490 sampling from both directions
1467 query 6; still undecided: 456, sample size is: 123
1491 query 6; still undecided: 456, sample size is: 123
1468 sampling from both directions
1492 sampling from both directions
1469 query 7; still undecided: 332, sample size is: 147
1493 query 7; still undecided: 332, sample size is: 147
1470 sampling from both directions
1494 sampling from both directions
1471 query 8; still undecided: 184, sample size is: 176
1495 query 8; still undecided: 184, sample size is: 176
1472 sampling from both directions
1496 sampling from both directions
1473 query 9; still undecided: 8, sample size is: 8
1497 query 9; still undecided: 8, sample size is: 8
1474 9 total queries in *s (glob)
1498 9 total queries in *s (glob)
1475 elapsed time: * seconds (glob)
1499 elapsed time: * seconds (glob)
1476 round-trips: 9
1500 round-trips: 9
1477 queries: 993
1501 queries: 993
1478 heads summary:
1502 heads summary:
1479 total common heads: 1
1503 total common heads: 1
1480 also local heads: 0
1504 also local heads: 0
1481 also remote heads: 0
1505 also remote heads: 0
1482 both: 0
1506 both: 0
1483 local heads: 260
1507 local heads: 260
1484 common: 0
1508 common: 0
1485 missing: 260
1509 missing: 260
1486 remote heads: 1
1510 remote heads: 1
1487 common: 0
1511 common: 0
1488 unknown: 1
1512 unknown: 1
1489 local changesets: 1340
1513 local changesets: 1340
1490 common: 300
1514 common: 300
1491 heads: 1
1515 heads: 1
1492 roots: 1
1516 roots: 1
1493 missing: 1040
1517 missing: 1040
1494 heads: 260
1518 heads: 260
1495 roots: 260
1519 roots: 260
1496 first undecided set: 1340
1520 first undecided set: 1340
1497 heads: 260
1521 heads: 260
1498 roots: 1
1522 roots: 1
1499 common: 300
1523 common: 300
1500 missing: 1040
1524 missing: 1040
1501 common heads: 3ee37d65064a
1525 common heads: 3ee37d65064a
1502
1526
1503 Test actual protocol when pulling one new head in addition to common heads
1527 Test actual protocol when pulling one new head in addition to common heads
1504
1528
1505 $ hg clone -U b c
1529 $ hg clone -U b c
1506 $ hg -R c id -ir tip
1530 $ hg -R c id -ir tip
1507 513314ca8b3a
1531 513314ca8b3a
1508 $ hg -R c up -qr default
1532 $ hg -R c up -qr default
1509 $ touch c/f
1533 $ touch c/f
1510 $ hg -R c ci -Aqm "extra head"
1534 $ hg -R c ci -Aqm "extra head"
1511 $ hg -R c id -i
1535 $ hg -R c id -i
1512 e64a39e7da8b
1536 e64a39e7da8b
1513
1537
1514 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1538 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1515 $ cat hg.pid >> $DAEMON_PIDS
1539 $ cat hg.pid >> $DAEMON_PIDS
1516
1540
1517 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1541 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1518 comparing with http://localhost:$HGPORT/
1542 comparing with http://localhost:$HGPORT/
1519 searching for changes
1543 searching for changes
1520 e64a39e7da8b
1544 e64a39e7da8b
1521
1545
1522 $ killdaemons.py
1546 $ killdaemons.py
1523 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1547 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1524 "GET /?cmd=capabilities HTTP/1.1" 200 -
1548 "GET /?cmd=capabilities HTTP/1.1" 200 -
1525 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1549 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1526 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1550 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1527 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1551 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1528 $ cat errors.log
1552 $ cat errors.log
1529
1553
1530 $ cd ..
1554 $ cd ..
1531
1555
1532
1556
1533 Issue 4438 - test coverage for 3ef893520a85 issues.
1557 Issue 4438 - test coverage for 3ef893520a85 issues.
1534
1558
1535 $ mkdir issue4438
1559 $ mkdir issue4438
1536 $ cd issue4438
1560 $ cd issue4438
1537 #if false
1561 #if false
1538 generate new bundles:
1562 generate new bundles:
1539 $ hg init r1
1563 $ hg init r1
1540 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1564 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1541 $ hg clone -q r1 r2
1565 $ hg clone -q r1 r2
1542 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1566 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1543 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1567 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1544 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1568 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1545 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1569 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1546 #else
1570 #else
1547 use existing bundles:
1571 use existing bundles:
1548 $ hg init r1
1572 $ hg init r1
1549 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1573 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1550 $ hg -R r1 -q up
1574 $ hg -R r1 -q up
1551 $ hg init r2
1575 $ hg init r2
1552 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1576 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1553 $ hg -R r2 -q up
1577 $ hg -R r2 -q up
1554 #endif
1578 #endif
1555
1579
1556 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1580 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1557
1581
1558 $ hg -R r1 outgoing r2 -T'{rev} '
1582 $ hg -R r1 outgoing r2 -T'{rev} '
1559 comparing with r2
1583 comparing with r2
1560 searching for changes
1584 searching for changes
1561 101 102 103 104 105 106 107 108 109 110 (no-eol)
1585 101 102 103 104 105 106 107 108 109 110 (no-eol)
1562
1586
1563 The case where all the 'initialsamplesize' samples already were common would
1587 The case where all the 'initialsamplesize' samples already were common would
1564 give 'all remote heads known locally' without checking the remaining heads -
1588 give 'all remote heads known locally' without checking the remaining heads -
1565 fixed in 86c35b7ae300:
1589 fixed in 86c35b7ae300:
1566
1590
1567 $ cat >> r1/.hg/hgrc << EOF
1591 $ cat >> r1/.hg/hgrc << EOF
1568 > [devel]
1592 > [devel]
1569 > discovery.randomize = False
1593 > discovery.randomize = False
1570 > EOF
1594 > EOF
1571
1595
1572 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1596 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1573 > --config blackbox.track='command commandfinish discovery'
1597 > --config blackbox.track='command commandfinish discovery'
1574 comparing with r2
1598 comparing with r2
1575 searching for changes
1599 searching for changes
1576 101 102 103 104 105 106 107 108 109 110 (no-eol)
1600 101 102 103 104 105 106 107 108 109 110 (no-eol)
1577 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1601 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1578 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --no-profile --cmdserver chgunix * (glob) (chg !)
1602 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --no-profile --cmdserver chgunix * (glob) (chg !)
1579 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1603 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1580 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1604 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1581 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1605 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1582 $ cd ..
1606 $ cd ..
1583
1607
1584 Even if the set of revs to discover is restricted, unrelated revs may be
1608 Even if the set of revs to discover is restricted, unrelated revs may be
1585 returned as common heads.
1609 returned as common heads.
1586
1610
1587 $ mkdir ancestorsof
1611 $ mkdir ancestorsof
1588 $ cd ancestorsof
1612 $ cd ancestorsof
1589 $ hg init a
1613 $ hg init a
1590 $ hg clone a b -q
1614 $ hg clone a b -q
1591 $ cd b
1615 $ cd b
1592 $ hg debugbuilddag '.:root *root *root'
1616 $ hg debugbuilddag '.:root *root *root'
1593 $ hg log -G -T '{node|short}'
1617 $ hg log -G -T '{node|short}'
1594 o fa942426a6fd
1618 o fa942426a6fd
1595 |
1619 |
1596 | o 66f7d451a68b
1620 | o 66f7d451a68b
1597 |/
1621 |/
1598 o 1ea73414a91b
1622 o 1ea73414a91b
1599
1623
1600 $ hg push -r 66f7d451a68b -q
1624 $ hg push -r 66f7d451a68b -q
1601 $ hg debugdiscovery --verbose --rev fa942426a6fd
1625 $ hg debugdiscovery --verbose --rev fa942426a6fd
1602 comparing with $TESTTMP/ancestorsof/a
1626 comparing with $TESTTMP/ancestorsof/a
1603 searching for changes
1627 searching for changes
1604 elapsed time: * seconds (glob)
1628 elapsed time: * seconds (glob)
1605 round-trips: 1
1629 round-trips: 1
1606 queries: 1
1630 queries: 1
1607 heads summary:
1631 heads summary:
1608 total common heads: 1
1632 total common heads: 1
1609 also local heads: 1
1633 also local heads: 1
1610 also remote heads: 1
1634 also remote heads: 1
1611 both: 1
1635 both: 1
1612 local heads: 2
1636 local heads: 2
1613 common: 1
1637 common: 1
1614 missing: 1
1638 missing: 1
1615 remote heads: 1
1639 remote heads: 1
1616 common: 1
1640 common: 1
1617 unknown: 0
1641 unknown: 0
1618 local changesets: 3
1642 local changesets: 3
1619 common: 2
1643 common: 2
1620 heads: 1
1644 heads: 1
1621 roots: 1
1645 roots: 1
1622 missing: 1
1646 missing: 1
1623 heads: 1
1647 heads: 1
1624 roots: 1
1648 roots: 1
1625 first undecided set: 1
1649 first undecided set: 1
1626 heads: 1
1650 heads: 1
1627 roots: 1
1651 roots: 1
1628 common: 0
1652 common: 0
1629 missing: 1
1653 missing: 1
1630 common heads: 66f7d451a68b
1654 common heads: 66f7d451a68b
1631
1655
1632 $ cd ..
1656 $ cd ..
1633
1657
1634
1658
1635 Test debugging discovery using different subsets of the same repository
1659 Test debugging discovery using different subsets of the same repository
1636 =====================================================================
1660 =====================================================================
1637
1661
1638 remote is a local subset
1662 remote is a local subset
1639 ------------------------
1663 ------------------------
1640
1664
1641 remote will be last 25 heads of the local graph
1665 remote will be last 25 heads of the local graph
1642
1666
1643 $ cd $TESTTMP/manyheads
1667 $ cd $TESTTMP/manyheads
1644 $ hg -R a debugdiscovery \
1668 $ hg -R a debugdiscovery \
1645 > --debug \
1669 > --debug \
1646 > --remote-as-revs 'last(heads(all()), 25)' \
1670 > --remote-as-revs 'last(heads(all()), 25)' \
1647 > --config devel.discovery.randomize=false
1671 > --config devel.discovery.randomize=false
1648 query 1; heads
1672 query 1; heads
1649 searching for changes
1673 searching for changes
1650 all remote heads known locally
1674 all remote heads known locally
1651 elapsed time: * seconds (glob)
1675 elapsed time: * seconds (glob)
1652 round-trips: 1
1676 round-trips: 1
1653 queries: 260
1677 queries: 260
1654 heads summary:
1678 heads summary:
1655 total common heads: 25
1679 total common heads: 25
1656 also local heads: 25
1680 also local heads: 25
1657 also remote heads: 25
1681 also remote heads: 25
1658 both: 25
1682 both: 25
1659 local heads: 260
1683 local heads: 260
1660 common: 25
1684 common: 25
1661 missing: 235
1685 missing: 235
1662 remote heads: 25
1686 remote heads: 25
1663 common: 25
1687 common: 25
1664 unknown: 0
1688 unknown: 0
1665 local changesets: 1340
1689 local changesets: 1340
1666 common: 400
1690 common: 400
1667 heads: 25
1691 heads: 25
1668 roots: 1
1692 roots: 1
1669 missing: 940
1693 missing: 940
1670 heads: 235
1694 heads: 235
1671 roots: 235
1695 roots: 235
1672 first undecided set: 940
1696 first undecided set: 940
1673 heads: 235
1697 heads: 235
1674 roots: 235
1698 roots: 235
1675 common: 0
1699 common: 0
1676 missing: 940
1700 missing: 940
1677 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1701 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1678
1702
1679 local is a local subset
1703 local is a local subset
1680 ------------------------
1704 ------------------------
1681
1705
1682 local will be the first 25 heads of the local graph
1706 local will be the first 25 heads of the local graph
1683
1707
1684 $ cd $TESTTMP/manyheads
1708 $ cd $TESTTMP/manyheads
1685 $ hg -R a debugdiscovery b \
1709 $ hg -R a debugdiscovery b \
1686 > --debug \
1710 > --debug \
1687 > --local-as-revs 'first(heads(all()), 25)' \
1711 > --local-as-revs 'first(heads(all()), 25)' \
1688 > --config devel.discovery.randomize=false
1712 > --config devel.discovery.randomize=false
1689 comparing with b
1713 comparing with b
1690 query 1; heads
1714 query 1; heads
1691 searching for changes
1715 searching for changes
1692 taking quick initial sample
1716 taking quick initial sample
1693 query 2; still undecided: 375, sample size is: 81
1717 query 2; still undecided: 375, sample size is: 81
1694 sampling from both directions
1718 sampling from both directions
1695 query 3; still undecided: 3, sample size is: 3
1719 query 3; still undecided: 3, sample size is: 3
1696 3 total queries *s (glob)
1720 3 total queries *s (glob)
1697 elapsed time: * seconds (glob)
1721 elapsed time: * seconds (glob)
1698 round-trips: 3
1722 round-trips: 3
1699 queries: 109
1723 queries: 109
1700 heads summary:
1724 heads summary:
1701 total common heads: 1
1725 total common heads: 1
1702 also local heads: 0
1726 also local heads: 0
1703 also remote heads: 0
1727 also remote heads: 0
1704 both: 0
1728 both: 0
1705 local heads: 25
1729 local heads: 25
1706 common: 0
1730 common: 0
1707 missing: 25
1731 missing: 25
1708 remote heads: 1
1732 remote heads: 1
1709 common: 0
1733 common: 0
1710 unknown: 1
1734 unknown: 1
1711 local changesets: 400
1735 local changesets: 400
1712 common: 300
1736 common: 300
1713 heads: 1
1737 heads: 1
1714 roots: 1
1738 roots: 1
1715 missing: 100
1739 missing: 100
1716 heads: 25
1740 heads: 25
1717 roots: 25
1741 roots: 25
1718 first undecided set: 400
1742 first undecided set: 400
1719 heads: 25
1743 heads: 25
1720 roots: 1
1744 roots: 1
1721 common: 300
1745 common: 300
1722 missing: 100
1746 missing: 100
1723 common heads: 3ee37d65064a
1747 common heads: 3ee37d65064a
1724
1748
1725 both local and remote are subsets
1749 both local and remote are subsets
1726 ------------------------
1750 ------------------------
1727
1751
1728 remote will be last 25 heads of the local graph
1752 remote will be last 25 heads of the local graph
1729
1753
1730 $ cd $TESTTMP/manyheads
1754 $ cd $TESTTMP/manyheads
1731 $ hg -R a debugdiscovery \
1755 $ hg -R a debugdiscovery \
1732 > --debug \
1756 > --debug \
1733 > --local-as-revs 'first(heads(all()), 25)' \
1757 > --local-as-revs 'first(heads(all()), 25)' \
1734 > --remote-as-revs 'last(heads(all()), 25)' \
1758 > --remote-as-revs 'last(heads(all()), 25)' \
1735 > --config devel.discovery.randomize=false
1759 > --config devel.discovery.randomize=false
1736 query 1; heads
1760 query 1; heads
1737 searching for changes
1761 searching for changes
1738 taking quick initial sample
1762 taking quick initial sample
1739 query 2; still undecided: 375, sample size is: 81
1763 query 2; still undecided: 375, sample size is: 81
1740 sampling from both directions
1764 sampling from both directions
1741 query 3; still undecided: 3, sample size is: 3
1765 query 3; still undecided: 3, sample size is: 3
1742 3 total queries in *s (glob)
1766 3 total queries in *s (glob)
1743 elapsed time: * seconds (glob)
1767 elapsed time: * seconds (glob)
1744 round-trips: 3
1768 round-trips: 3
1745 queries: 109
1769 queries: 109
1746 heads summary:
1770 heads summary:
1747 total common heads: 1
1771 total common heads: 1
1748 also local heads: 0
1772 also local heads: 0
1749 also remote heads: 0
1773 also remote heads: 0
1750 both: 0
1774 both: 0
1751 local heads: 25
1775 local heads: 25
1752 common: 0
1776 common: 0
1753 missing: 25
1777 missing: 25
1754 remote heads: 25
1778 remote heads: 25
1755 common: 0
1779 common: 0
1756 unknown: 25
1780 unknown: 25
1757 local changesets: 400
1781 local changesets: 400
1758 common: 300
1782 common: 300
1759 heads: 1
1783 heads: 1
1760 roots: 1
1784 roots: 1
1761 missing: 100
1785 missing: 100
1762 heads: 25
1786 heads: 25
1763 roots: 25
1787 roots: 25
1764 first undecided set: 400
1788 first undecided set: 400
1765 heads: 25
1789 heads: 25
1766 roots: 1
1790 roots: 1
1767 common: 300
1791 common: 300
1768 missing: 100
1792 missing: 100
1769 common heads: 3ee37d65064a
1793 common heads: 3ee37d65064a
1770
1794
1771 Test -T json output
1795 Test -T json output
1772 -------------------
1796 -------------------
1773
1797
1774 $ hg -R a debugdiscovery \
1798 $ hg -R a debugdiscovery \
1775 > -T json \
1799 > -T json \
1776 > --debug \
1800 > --debug \
1777 > --local-as-revs 'first(heads(all()), 25)' \
1801 > --local-as-revs 'first(heads(all()), 25)' \
1778 > --remote-as-revs 'last(heads(all()), 25)' \
1802 > --remote-as-revs 'last(heads(all()), 25)' \
1779 > --config devel.discovery.randomize=false
1803 > --config devel.discovery.randomize=false
1780 [
1804 [
1781 {
1805 {
1782 "elapsed": *, (glob)
1806 "elapsed": *, (glob)
1783 "nb-common-heads": 1,
1807 "nb-common-heads": 1,
1784 "nb-common-heads-both": 0,
1808 "nb-common-heads-both": 0,
1785 "nb-common-heads-local": 0,
1809 "nb-common-heads-local": 0,
1786 "nb-common-heads-remote": 0,
1810 "nb-common-heads-remote": 0,
1787 "nb-common-roots": 1,
1811 "nb-common-roots": 1,
1788 "nb-head-local": 25,
1812 "nb-head-local": 25,
1789 "nb-head-local-missing": 25,
1813 "nb-head-local-missing": 25,
1790 "nb-head-remote": 25,
1814 "nb-head-remote": 25,
1791 "nb-head-remote-unknown": 25,
1815 "nb-head-remote-unknown": 25,
1792 "nb-ini_und": 400,
1816 "nb-ini_und": 400,
1793 "nb-ini_und-common": 300,
1817 "nb-ini_und-common": 300,
1794 "nb-ini_und-heads": 25,
1818 "nb-ini_und-heads": 25,
1795 "nb-ini_und-missing": 100,
1819 "nb-ini_und-missing": 100,
1796 "nb-ini_und-roots": 1,
1820 "nb-ini_und-roots": 1,
1797 "nb-missing-heads": 25,
1821 "nb-missing-heads": 25,
1798 "nb-missing-roots": 25,
1822 "nb-missing-roots": 25,
1799 "nb-revs": 400,
1823 "nb-revs": 400,
1800 "nb-revs-common": 300,
1824 "nb-revs-common": 300,
1801 "nb-revs-missing": 100,
1825 "nb-revs-missing": 100,
1802 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1826 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1803 "total-queries": 109,
1827 "total-queries": 109,
1804 "total-roundtrips": 3
1828 "total-roundtrips": 3
1805 }
1829 }
1806 ]
1830 ]
General Comments 0
You need to be logged in to leave comments. Login now