##// END OF EJS Templates
debug-discovery: also gather details on tree-discovery queries type...
marmoute -
r50296:362c0026 stable
parent child Browse files
Show More
@@ -1,5034 +1,5038 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 108 debug as revlog_debug,
109 109 deltas as deltautil,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
# Convenience alias so call sites in this module can release locks
# without spelling out lockmod at each use.
release = lockmod.release

# Single command table for all debug* commands defined below.  It is
# pre-seeded with the commands registered by the `strip` module so that
# everything ends up registered in one table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
120 120
121 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly from the
        # current working directory, bypassing any repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
141 141
142 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # The path handed to a vfs must be bytes, like every other path in
    # this module; the original passed a str literal here, which is
    # inconsistent with the bytes-path convention used everywhere else.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
158 158
159 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166 166
167 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Unless --from-existing was requested, refuse to run on a repo that
    # already has history.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parsing pass only counts 'n' node events for the progress bar;
    # note: `type` here shadows the builtin, kept as-is for fidelity)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # All commits happen inside a single transaction, under both locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1: none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # node id for each committed rev, indexed by rev number
        id = 0
        progress.update(id)
        # Second parsing pass: actually build the commits.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain a single file "mf" whose lines merge cleanly:
                    # on a merge node, three-way merge the parents' copies.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's slice of the file so each rev
                    # introduces a distinguishable change.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is fully rewritten at every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # A brand new file "nf<id>" per revision; on merges,
                    # carry over the second parent's nf* files too.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # built above; None means "file absent".
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve backref parent ids to previously committed nodes.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out at the end
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch event: applies to all following nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
351 351
352 352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump a changegroup: terse node list, or full per-delta details

    With ``all`` falsy only the changelog node ids are printed; otherwise
    every delta of the changelog, manifest and filelogs is listed.
    """
    pad = b' ' * indent

    if not all:
        # terse mode: bundle2 payloads need the dedicated command
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (pad, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % pad
    )

    def showchunks(named):
        # print one line per delta of the current section
        ui.write(b"\n%s%s\n" % (pad, named))
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(
                b"%s%s %s %s %s %s %d\n"
                % (
                    pad,
                    hex(node),
                    hex(p1),
                    hex(p2),
                    hex(cs),
                    hex(deltabase),
                    len(delta),
                )
            )

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
392 392
393 393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # report the unknown version rather than crashing
        line = b"%sunsupported version: %s (%d bytes)\n"
        ui.write(line % (pad, exc.version, len(blob)))
    else:
        line = b"%sversion: %d (%d bytes)\n"
        ui.write(line % (pad, version, len(blob)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            mark = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, mark)
        fm.end()
416 416
417 417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one head per line"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426 426
427 427
def _quasirepr(thing):
    """repr()-like byte string, with deterministic ordering for mappings"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        rendered = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(rendered)
    return pycompat.bytestr(repr(thing))
434 434
435 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering if any types were requested
        if wanted and part.type not in wanted:
            continue
        headerline = b'%s -- %s (mandatory: %r)\n'
        ui.write(
            (headerline % (part.type, _quasirepr(part.params), part.mandatory))
        )
        if part.type == b'changegroup':
            # instantiate the unbundler even in quiet mode so the part is
            # consumed the same way regardless of verbosity
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 458
459 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, do not unpack the bundle
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 482
483 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for capkey, capvalues in sorted(b2caps.items()):
                ui.write(b' %s\n' % capkey)
                for value in capvalues:
                    ui.write(b' %s\n' % value)
    finally:
        # always disconnect, even if querying capabilities failed
        peer.close()
503 503
504 504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the changeset instead of trusting sidedata
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # first matching category wins; same precedence as before
            for group, label in (
                (files.added, b"added"),
                (files.removed, b"removed"),
                (files.merged, b"merged"),
                (files.salvaged, b"salvaged"),
            ):
                if f in group:
                    action = label
                    break
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            ui.write(
                b"%-8s %2s: %s, %s;\n" % (action, copy_parent, f, copy_source)
            )
554 554
555 555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        # each error is (format, *args)
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 569
570 570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 583
584 584
def _debugdisplaycolor(ui):
    """print every color label the active configuration provides"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # sort labels containing '_' after the others so entries like
        # '_background' come grouped at the end
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 601
602 602
def _debugdisplaystyle(ui):
    """print each configured style label along with its effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # align the effect list into one column
            padding = b' ' * (max(0, width - len(label)))
            ui.write(b': ')
            ui.write(padding)
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
616 616
617 617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # named `reqs` rather than `requirements`: the original local name
    # shadowed the `requirements` module imported at the top of this file
    reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
639 639
640 640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: walk that revlog's DAG
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) events, plus an 'l' (label) event for
            # every revision the user asked to highlight
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file argument: walk the changelog of the local repository
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged revision to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # yield 'a' (branch annotation) events on branch changes,
            # 'n' events for every revision, and 'l' events for tags
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # render the event stream as dagparser text
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710 710
711 711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the single positional
    # argument is the revision rather than a file name.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 727
728 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 747
748 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                      (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                      (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision chain statistics; returns the tuple
        # (p1, p2, compsize, uncompsize, deltatype, chain, chainsize).
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            # follow empty deltas down to the effective base of p1
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            # same resolution for the second parent
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify the delta base; order matters: exact parent match
            # takes precedence over the "skip" cases and snapshots
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta a revision is either a full snapshot
            # or a delta against the previous revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # one chain id per unique chain base, numbered in discovery order
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # on-disk distance from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the whole chain to measure how
            # much data would actually be read from disk
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989 989
990 990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument: it is the revision.  Two: the first selects
    # the file whose revlog should be inspected.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # local named `rl` instead of `revlog` to avoid shadowing the `revlog`
    # module imported at the top of this file; the command label passed to
    # openrevlog (used in its error messages) is fixed from the
    # copy-pasted b'debugdeltachain' to this command's own name
    rl = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    deltacomputer = deltautil.deltacomputer(
        rl,
        write_debug=ui.write,
        debug_search=True,
    )

    # rebuild the revisioninfo that the storage layer would have been
    # given when originally storing this revision
    node = rl.node(rev)
    p1r, p2r = rl.parentrevs(rev)
    p1 = rl.node(p1r)
    p2 = rl.node(p2r)
    btext = [rl.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None  # force a full search, no pre-computed delta
    flags = rl.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = rl._datafp()
    # replay the delta search, dumping each step via write_debug
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1049 1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket (metadata file) instead of the
    # individual entries; only meaningful for dirstate-v2 repositories
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # unpack the fixed-layout tree metadata blob; field order must match
        # dirstateutils.v2.TREE_METADATA
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is the deprecated spelling; any explicit value forces
    # "no dates" regardless of --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so equal mtimes stay deterministic
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # mtime of -1 marks an entry whose timestamp is unknown/invalid
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # S_IFLNK bit: render symlinks as 'lnk' instead of an octal mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137 1137
1138 1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        # the ignore pattern hash is the trailing field of the tree metadata
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153 1153
1154 1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the sample size used during the initial phase of discovery
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # a real (or at least user-designated) remote peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository restricted to the ancestors of `remote_revs`
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick applied to the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` is filled in by the discovery implementation (audit parameter)
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
                if remote_revs:
                    r = remote._repo.filtered(b'debug-discovery-remote-filter')
                    remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # for machine-readable output, capture whatever the discovery
        # prints so it ends up inside the formatted data instead
        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    # the set neither side can classify from heads alone
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    # per-query-type details are only reported by tree discovery
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1412 1416
1413 1417
_chunksize = 4 << 10  # 4 KiB read/write buffer used by debugdownload
1415 1419
1416 1420
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is streamed in _chunksize pieces either to the ui (default)
    or to the file given with --output.
    """
    fh = urlmod.open(ui, url, output)

    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            # only close what we opened ourselves; `ui` must stay usable
            if output:
                dest.close()
    finally:
        # previously the response handle leaked; always release it
        fh.close()
1439 1443
1440 1444
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate where the extension was loaded from; oxidized binaries
        # have no __file__, so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # terse (quiet) and very chatty (verbose) modes both print the bare
        # name on its own line; the default mode appends a tested-with note
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1502 1506
1503 1507
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the parsing pipeline; each stage name can be selected via --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to run the matcher against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally print every candidate the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1599 1603
1600 1604
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report and applying/dry-running a repair are exclusive modes
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in revlogutils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1673 1677
1674 1678
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name (at least as wide as the header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output renders booleans as yes/no; other formatters keep
        # the raw value
        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color-code mismatches between repo state, config, and defaults
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1745 1749
1746 1750
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result exactly as the original output does
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # case sensitivity needs a scratch file; report '(unknown)' when the
    # probe cannot create one (e.g. unwritable directory)
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1769 1773
1770 1774
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing --type value to the on-disk bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1817 1821
1818 1822
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then each of its parent
                # directories (a file inside an ignored directory is ignored)
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which hgignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1867 1871
1868 1872
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # unwrap stores that proxy an underlying revlog (e.g. filelogs)
    # NOTE(review): the attribute name is passed as bytes; this relies on
    # the pycompat.getattr wrapper imported at the top of the file
    revlog = getattr(store, b'_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1890 1894
1891 1895
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        parents = store.parents(store.node(rev))
        # always emit the first-parent edge; the second one only exists
        # for merge revisions (non-null second parent)
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1910 1914
1911 1915
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be fully built before asking it for statistics
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1921 1925
1922 1926
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a sequence of environment checks (encoding, Python, compiled
    modules, compression engines, templates, editor, username and any
    extension-provided checks) and reports each through the formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # number of detected problems; doubles as the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds have no on-disk stdlib; report the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # probe for the optional Rust extension (informational only)
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # when the policy requires compiled modules, verify they import cleanly
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    # p doubles as the "templates are usable" flag from here on
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # a missing 'vi' (the fallback default) is only a warning, not a problem
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2221 2225
2222 2226
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(node_id) for node_id in ids])
    # render one '1' (known) or '0' (unknown) per queried id
    rendered = b"".join(b"1" if known else b"0" for known in flags)
    ui.write(b"%s\n" % rendered)
2236 2240
2237 2241
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # historical alias: the actual implementation lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2242 2246
2243 2247
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # mode 1: forcibly delete lock files, then exit immediately
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # mode 2: acquire the requested lock(s) and hold them until
    # interrupted; the finally clause guarantees release on every path
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # mode 3 (default): report the state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Report the state of one lock file; return 1 if held, 0 if free.

        ``method`` is the repo's non-blocking lock acquirer; ``vfs`` is the
        vfs layer holding the lock file ``name``.
        """
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it: release and report free
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # lock content is "host:pid"; only show the host when the
                # holding process runs on another machine
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the probe and the stat: free
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2366 2370
2367 2371
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        """Return the fulltext cache of the manifest storage.

        Aborts when the active revlog implementation has no such cache.
        """
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: wipe both the in-memory and on-disk cache contents
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    # --add NODE...: read each manifest, which populates the cache
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # default: display the cache contents, most recently used first
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2441 2445
2442 2446
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # with --verbose, first report which on-disk record format is in use
    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # default template renders commits, per-file state and extras
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local and other side)
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the tuple layout of ms._state[f] depends on
    # the record type stored in state[0]
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras attached to files that are not themselves in the merge state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2550 2554
2551 2555
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    """complete "names" - tags, open branch names, bookmark names"""

    names = set()
    # branches get special treatment below so that only open ones appear
    for namespace, ns in repo.names.items():
        if namespace != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)

    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(name for name in names if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2574 2578
2575 2579
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap

    Exactly one mode is honoured per invocation, in the order the flags
    are listed above; with no flag, the command does nothing.
    """
    modes = ('dump_new', 'dump_disk', 'check', 'metadata')
    if not any(opts[mode] for mode in modes):
        # no mode requested: keep the historical no-op behavior
        return
    # every mode operates on the unfiltered changelog; previously this
    # setup (and the persisted_data() call) was duplicated in each branch
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # a native index can serialize its nodemap itself
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2637 2641
2638 2642
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """Parse a full hex node id into binary, or raise InputError."""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # mode 1: --delete INDEX... removes markers by their store index
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # mode 2: a precursor argument means "create a marker"
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # nested try/finally keeps lock release after transaction release
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    # mode 3 (default): list markers, optionally restricted to --rev
    else:
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2788 2792
2789 2793
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copies_map = ctx.p1copies()
    # render as "source -> destination", one pair per line
    for dest in copies_map:
        ui.write(b'%s -> %s\n' % (copies_map[dest], dest))
2802 2806
2803 2807
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copies_map = ctx.p2copies()
    # render as "source -> destination", one pair per line
    for dest in copies_map:
        ui.write(b'%s -> %s\n' % (copies_map[dest], dest))
2816 2820
2817 2821
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        """Return (files, dirs) completions for one path prefix."""
        ds = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate on OSes with another sep
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files = set()
        dirs = set()
        for tracked, entry in ds.items():
            if not (tracked.startswith(spec) and entry.state in acceptable):
                continue
            if fixpaths:
                tracked = tracked.replace(b'/', pycompat.ossep)
            if fullpaths:
                files.add(tracked)
                continue
            sep_idx = tracked.find(pycompat.ossep, speclen)
            if sep_idx >= 0:
                # stop at the next path segment boundary
                dirs.add(tracked[:sep_idx])
            else:
                files.add(tracked)
        return files, dirs

    state_filter = b''
    if opts['normal']:
        state_filter += b'nm'
    if opts['added']:
        state_filter += b'a'
    if opts['removed']:
        state_filter += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files = set()
    dirs = set()
    for spec in specs:
        matched_files, matched_dirs = complete(spec, state_filter or b'nmar')
        files.update(matched_files)
        dirs.update(matched_dirs)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2886 2890
2887 2891
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    # sorted for deterministic output: one "source -> dest" line per copy
    copy_map = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2901 2905
2902 2906
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(can_push))
    finally:
        # always release the peer connection, even if a probe failed
        peer.close()
2926 2930
2927 2931
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool wins over every other configuration source, so inject it as a
    # ui.forcemerge override for the duration of the examination
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # with -v/--verbose, surface the inputs that can short-circuit the
        # merge-patterns matching (HGMERGE and ui.merge), if set
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress _picktool's warnings unless --debug is in effect
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3012 3016
3013 3017
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # update mode: conditionally set key from old to new
            key, old, new = keyinfo
            args = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with target.commandexecutor() as executor:
                r = executor.callcommand(b'pushkey', args).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # exit status 0 on success, 1 on failure
            return not r
    finally:
        target.close()
3049 3053
3050 3054
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvecs") of two revisions

    Prints both vectors, their depths, the delta/hamming distance
    between them, and their relation: ``=`` (equal), ``>`` / ``<``
    (ancestor ordering), ``|`` (unrelated), or ``?`` if none of the
    pvec comparisons matched.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: previously `rel` stayed unbound (and the
        # write below raised NameError) when none of the comparisons held.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3077 3081
3078 3082
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # tracked by the manifest but missing from the dirstate
            manifestonly = in_manifest - in_dirstate
            # dirstate-only entries that do not claim to be added
            dsnotadded = {
                f
                for f in in_dirstate - in_manifest
                if not dirstate.get_entry(f).added
            }
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3126 3130
3127 3131
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    # delegate the actual work to the repair module
    only_data = opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3144 3148
3145 3149
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(matcher):
        fctx = ctx[abs]
        # renamed() gives (old path, old filenode) or a falsy value
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            oldpath, oldnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (rel, oldpath, hex(oldnode))
            )
3165 3169
3166 3170
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, in sorted order for stable output
    for req in sorted(repo.requirements):
        ui.write(b"%s\n" % req)
3172 3176
3173 3177
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one raw index line per revision instead of the
    aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # raw dump mode: one line of index data per revision, then exit
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        # ts accumulates total rawsize; heads tracks current DAG heads
        ts = 0
        heads = set()

        for rev in range(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # stored as a full text: treat the rev as its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision's parents stop being heads once it appears
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # cumulative rawsize over cumulative stored size so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # statistics mode: first decode format version and flags
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is a [min, max, total] accumulator
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l` in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # single pass over all revisions, classifying each delta
    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full text (or empty text): starts a new delta chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta revision: extend the base's chain bookkeeping
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify by which revision it is based on
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # the first byte identifies the compression engine used
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # turn the [min, max, total] totals into averages where counts allow
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string templates, widened to the largest value being printed
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # plain decimal format sized to fit `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # decimal-plus-percentage format sized to fit `max`
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the pcfmtstr templates
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    # revision counts, broken down by kind
    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # label a chunk-type byte, printing printable engines by name
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    # per-compression-engine chunk counts and sizes
    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    # delta-chain statistics
    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        # size distributions (only meaningful for revlog format > 0)
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        # breakdown of what each delta is computed against
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3528 3532
3529 3533
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported via -f/--format (0 or 1); both grow
    extra columns under --verbose, and --debug switches node ids from
    short to full hex.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full node hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the first node id to size the header columns
        idlen = len(shortfn(r.node(i)))
        break

    # print the column header matching the chosen format/verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # format 0 shows parents as node ids
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 shows parents as revision numbers plus index flags
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3643 3647
3644 3648
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the revset compilation pipeline, in order; each stage transforms the
    # parse tree produced by the previous one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # decide which stage trees get printed, and when
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing trees as requested along the way
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # mismatch: emit a unified-diff-style comparison and return 1
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal mode: evaluate the final tree and print the resulting revs
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3776 3780
3777 3781
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3826 3830
3827 3831
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; a missing REV2 defaults to the null
    # revision, which clears p2.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; the working copy contents are
    # deliberately left alone (hence only wlock is taken).
    with repo.wlock():
        repo.setparents(node1, node2)
3855 3859
3856 3860
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not a
    # file path, so shift it over.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fixed: the error previously named b'debugdata' (copy-paste from
            # that command), which printed the wrong command name to users.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3883 3887
3884 3888
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known default port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled here: we only need
    # the peer's certificate bytes so win32 can inspect/build the chain.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # True requests the certificate in binary (DER) form.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only checks; build=False avoids touching the system.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second pass lets Windows fetch missing chain elements.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3956 3960
3957 3961
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show the changesets of one bundle, honoring the standard log
        # options (--limit, --newest-first, --no-merges).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style machinery while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the requested node,
                # then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # getremotechanges creates temporary state that must be cleaned
            # up whether or not we displayed/applied anything.
            cleanupfn()
4098 4102
4099 4103
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state (path, source, revision) of a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4111 4115
4112 4116
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the two objects a debugging
    # session most commonly needs.
    code.interact(local={'ui': ui, 'repo': repo})
4128 4132
4129 4133
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        # Header line: the revision whose successors sets follow.
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One output line per successors set; an empty set (pruned
            # changeset) yields just the trailing newline.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4184 4188
4185 4189
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what the cache already holds.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode is None:
            rendered = b'missing'
        elif tagsnode:
            rendered = hex(tagsnode)
            # Flag cached .hgtags nodes that the filelog no longer knows.
            if not flog.hasnode(tagsnode):
                rendered += b' (unknown node)'
        else:
            rendered = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), rendered))
4204 4208
4205 4209
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions; 'ui' is reserved and an empty key
    # is invalid.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, and the tree after alias expansion when
        # that actually changed anything.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4269 4273
4270 4274
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() can yield None; substitute a marker so the output line
    # stays well formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4285 4289
4286 4290
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4299 4303
4300 4304
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the wlock and the store lock so every cache category may be
    # rewritten safely.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4306 4310
4307 4311
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin wrapper: all of the analysis/upgrade work lives in the upgrade
    # module; deduplicate requested optimizations via set().
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4357 4361
4358 4362
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite path separators when the user asked for forward slashes
    # on a platform that uses something else.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        normalize = util.normpath
    else:
        normalize = lambda fn: fn
    # Column widths come from the longest absolute and relative paths.
    abswidth = max(len(p) for p in items)
    relwidth = max(len(repo.pathto(p)) for p in items)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for p in items:
        rendered = fmt % (
            p,
            normalize(repo.pathto(p)),
            b'exact' if m.exact(p) else b'',
        )
        ui.write(b"%s\n" % rendered.rstrip())
4385 4389
4386 4390
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render divergent nodes (if any) as "hex (phase)" pairs followed by
        # a trailing separator space.
        dnodes = b''
        if entry.get(b'divergentnodes'):
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                for ctx in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(rendered) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4404 4408
4405 4409
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the command-specific ones
        # are forwarded, and only when they carry a value.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4436 4440
4437 4441
4438 4442 def _parsewirelangblocks(fh):
4439 4443 activeaction = None
4440 4444 blocklines = []
4441 4445 lastindent = 0
4442 4446
4443 4447 for line in fh:
4444 4448 line = line.rstrip()
4445 4449 if not line:
4446 4450 continue
4447 4451
4448 4452 if line.startswith(b'#'):
4449 4453 continue
4450 4454
4451 4455 if not line.startswith(b' '):
4452 4456 # New block. Flush previous one.
4453 4457 if activeaction:
4454 4458 yield activeaction, blocklines
4455 4459
4456 4460 activeaction = line
4457 4461 blocklines = []
4458 4462 lastindent = 0
4459 4463 continue
4460 4464
4461 4465 # Else we start with an indent.
4462 4466
4463 4467 if not activeaction:
4464 4468 raise error.Abort(_(b'indented line outside of block'))
4465 4469
4466 4470 indent = len(line) - len(line.lstrip())
4467 4471
4468 4472 # If this line is indented more than the last line, concatenate it.
4469 4473 if indent > lastindent and blocklines:
4470 4474 blocklines[-1] += line.lstrip()
4471 4475 else:
4472 4476 blocklines.append(line)
4473 4477 lastindent = indent
4474 4478
4475 4479 # Flush last block.
4476 4480 if activeaction:
4477 4481 yield activeaction, blocklines
4478 4482
4479 4483
4480 4484 @command(
4481 4485 b'debugwireproto',
4482 4486 [
4483 4487 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4484 4488 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4485 4489 (
4486 4490 b'',
4487 4491 b'noreadstderr',
4488 4492 False,
4489 4493 _(b'do not read from stderr of the remote'),
4490 4494 ),
4491 4495 (
4492 4496 b'',
4493 4497 b'nologhandshake',
4494 4498 False,
4495 4499 _(b'do not log I/O related to the peer handshake'),
4496 4500 ),
4497 4501 ]
4498 4502 + cmdutil.remoteopts,
4499 4503 _(b'[PATH]'),
4500 4504 optionalrepo=True,
4501 4505 )
4502 4506 def debugwireproto(ui, repo, path=None, **opts):
4503 4507 """send wire protocol commands to a server
4504 4508
4505 4509 This command can be used to issue wire protocol commands to remote
4506 4510 peers and to debug the raw data being exchanged.
4507 4511
4508 4512 ``--localssh`` will start an SSH server against the current repository
4509 4513 and connect to that. By default, the connection will perform a handshake
4510 4514 and establish an appropriate peer instance.
4511 4515
4512 4516 ``--peer`` can be used to bypass the handshake protocol and construct a
4513 4517 peer instance using the specified class type. Valid values are ``raw``,
4514 4518 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4515 4519 don't support higher-level command actions.
4516 4520
4517 4521 ``--noreadstderr`` can be used to disable automatic reading from stderr
4518 4522 of the peer (for SSH connections only). Disabling automatic reading of
4519 4523 stderr is useful for making output more deterministic.
4520 4524
4521 4525 Commands are issued via a mini language which is specified via stdin.
4522 4526 The language consists of individual actions to perform. An action is
4523 4527 defined by a block. A block is defined as a line with no leading
4524 4528 space followed by 0 or more lines with leading space. Blocks are
4525 4529 effectively a high-level command with additional metadata.
4526 4530
4527 4531 Lines beginning with ``#`` are ignored.
4528 4532
4529 4533 The following sections denote available actions.
4530 4534
4531 4535 raw
4532 4536 ---
4533 4537
4534 4538 Send raw data to the server.
4535 4539
4536 4540 The block payload contains the raw data to send as one atomic send
4537 4541 operation. The data may not actually be delivered in a single system
4538 4542 call: it depends on the abilities of the transport being used.
4539 4543
4540 4544 Each line in the block is de-indented and concatenated. Then, that
4541 4545 value is evaluated as a Python b'' literal. This allows the use of
4542 4546 backslash escaping, etc.
4543 4547
4544 4548 raw+
4545 4549 ----
4546 4550
4547 4551 Behaves like ``raw`` except flushes output afterwards.
4548 4552
4549 4553 command <X>
4550 4554 -----------
4551 4555
4552 4556 Send a request to run a named command, whose name follows the ``command``
4553 4557 string.
4554 4558
4555 4559 Arguments to the command are defined as lines in this block. The format of
4556 4560 each line is ``<key> <value>``. e.g.::
4557 4561
4558 4562 command listkeys
4559 4563 namespace bookmarks
4560 4564
4561 4565 If the value begins with ``eval:``, it will be interpreted as a Python
4562 4566 literal expression. Otherwise values are interpreted as Python b'' literals.
4563 4567 This allows sending complex types and encoding special byte sequences via
4564 4568 backslash escaping.
4565 4569
4566 4570 The following arguments have special meaning:
4567 4571
4568 4572 ``PUSHFILE``
4569 4573 When defined, the *push* mechanism of the peer will be used instead
4570 4574 of the static request-response mechanism and the content of the
4571 4575 file specified in the value of this argument will be sent as the
4572 4576 command payload.
4573 4577
4574 4578 This can be used to submit a local bundle file to the remote.
4575 4579
4576 4580 batchbegin
4577 4581 ----------
4578 4582
4579 4583 Instruct the peer to begin a batched send.
4580 4584
4581 4585 All ``command`` blocks are queued for execution until the next
4582 4586 ``batchsubmit`` block.
4583 4587
4584 4588 batchsubmit
4585 4589 -----------
4586 4590
4587 4591 Submit previously queued ``command`` blocks as a batch request.
4588 4592
4589 4593 This action MUST be paired with a ``batchbegin`` action.
4590 4594
4591 4595 httprequest <method> <path>
4592 4596 ---------------------------
4593 4597
4594 4598 (HTTP peer only)
4595 4599
4596 4600 Send an HTTP request to the peer.
4597 4601
4598 4602 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4599 4603
4600 4604 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4601 4605 headers to add to the request. e.g. ``Accept: foo``.
4602 4606
4603 4607 The following arguments are special:
4604 4608
4605 4609 ``BODYFILE``
4606 4610 The content of the file defined as the value to this argument will be
4607 4611 transferred verbatim as the HTTP request body.
4608 4612
4609 4613 ``frame <type> <flags> <payload>``
4610 4614 Send a unified protocol frame as part of the request body.
4611 4615
4612 4616 All frames will be collected and sent as the body to the HTTP
4613 4617 request.
4614 4618
4615 4619 close
4616 4620 -----
4617 4621
4618 4622 Close the connection to the server.
4619 4623
4620 4624 flush
4621 4625 -----
4622 4626
4623 4627 Flush data written to the server.
4624 4628
4625 4629 readavailable
4626 4630 -------------
4627 4631
4628 4632 Close the write end of the connection and read all available data from
4629 4633 the server.
4630 4634
4631 4635 If the connection to the server encompasses multiple pipes, we poll both
4632 4636 pipes and read available data.
4633 4637
4634 4638 readline
4635 4639 --------
4636 4640
4637 4641 Read a line of output from the server. If there are multiple output
4638 4642 pipes, reads only the main pipe.
4639 4643
4640 4644 ereadline
4641 4645 ---------
4642 4646
4643 4647 Like ``readline``, but read from the stderr pipe, if available.
4644 4648
4645 4649 read <X>
4646 4650 --------
4647 4651
4648 4652 ``read()`` N bytes from the server's main output pipe.
4649 4653
4650 4654 eread <X>
4651 4655 ---------
4652 4656
4653 4657 ``read()`` N bytes from the server's stderr pipe, if available.
4654 4658
4655 4659 Specifying Unified Frame-Based Protocol Frames
4656 4660 ----------------------------------------------
4657 4661
4658 4662 It is possible to emit a *Unified Frame-Based Protocol* by using special
4659 4663 syntax.
4660 4664
4661 4665 A frame is composed as a type, flags, and payload. These can be parsed
4662 4666 from a string of the form:
4663 4667
4664 4668 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4665 4669
4666 4670 ``request-id`` and ``stream-id`` are integers defining the request and
4667 4671 stream identifiers.
4668 4672
4669 4673 ``type`` can be an integer value for the frame type or the string name
4670 4674 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4671 4675 ``command-name``.
4672 4676
4673 4677 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4674 4678 components. Each component (and there can be just one) can be an integer
4675 4679 or a flag name for stream flags or frame flags, respectively. Values are
4676 4680 resolved to integers and then bitwise OR'd together.
4677 4681
4678 4682 ``payload`` represents the raw frame payload. If it begins with
4679 4683 ``cbor:``, the following string is evaluated as Python code and the
4680 4684 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4681 4685 as a Python byte string literal.
4682 4686 """
4683 4687 opts = pycompat.byteskwargs(opts)
4684 4688
4685 4689 if opts[b'localssh'] and not repo:
4686 4690 raise error.Abort(_(b'--localssh requires a repository'))
4687 4691
4688 4692 if opts[b'peer'] and opts[b'peer'] not in (
4689 4693 b'raw',
4690 4694 b'ssh1',
4691 4695 ):
4692 4696 raise error.Abort(
4693 4697 _(b'invalid value for --peer'),
4694 4698 hint=_(b'valid values are "raw" and "ssh1"'),
4695 4699 )
4696 4700
4697 4701 if path and opts[b'localssh']:
4698 4702 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4699 4703
4700 4704 if ui.interactive():
4701 4705 ui.write(_(b'(waiting for commands on stdin)\n'))
4702 4706
4703 4707 blocks = list(_parsewirelangblocks(ui.fin))
4704 4708
4705 4709 proc = None
4706 4710 stdin = None
4707 4711 stdout = None
4708 4712 stderr = None
4709 4713 opener = None
4710 4714
4711 4715 if opts[b'localssh']:
4712 4716 # We start the SSH server in its own process so there is process
4713 4717 # separation. This prevents a whole class of potential bugs around
4714 4718 # shared state from interfering with server operation.
4715 4719 args = procutil.hgcmd() + [
4716 4720 b'-R',
4717 4721 repo.root,
4718 4722 b'debugserve',
4719 4723 b'--sshstdio',
4720 4724 ]
4721 4725 proc = subprocess.Popen(
4722 4726 pycompat.rapply(procutil.tonativestr, args),
4723 4727 stdin=subprocess.PIPE,
4724 4728 stdout=subprocess.PIPE,
4725 4729 stderr=subprocess.PIPE,
4726 4730 bufsize=0,
4727 4731 )
4728 4732
4729 4733 stdin = proc.stdin
4730 4734 stdout = proc.stdout
4731 4735 stderr = proc.stderr
4732 4736
4733 4737 # We turn the pipes into observers so we can log I/O.
4734 4738 if ui.verbose or opts[b'peer'] == b'raw':
4735 4739 stdin = util.makeloggingfileobject(
4736 4740 ui, proc.stdin, b'i', logdata=True
4737 4741 )
4738 4742 stdout = util.makeloggingfileobject(
4739 4743 ui, proc.stdout, b'o', logdata=True
4740 4744 )
4741 4745 stderr = util.makeloggingfileobject(
4742 4746 ui, proc.stderr, b'e', logdata=True
4743 4747 )
4744 4748
4745 4749 # --localssh also implies the peer connection settings.
4746 4750
4747 4751 url = b'ssh://localserver'
4748 4752 autoreadstderr = not opts[b'noreadstderr']
4749 4753
4750 4754 if opts[b'peer'] == b'ssh1':
4751 4755 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4752 4756 peer = sshpeer.sshv1peer(
4753 4757 ui,
4754 4758 url,
4755 4759 proc,
4756 4760 stdin,
4757 4761 stdout,
4758 4762 stderr,
4759 4763 None,
4760 4764 autoreadstderr=autoreadstderr,
4761 4765 )
4762 4766 elif opts[b'peer'] == b'raw':
4763 4767 ui.write(_(b'using raw connection to peer\n'))
4764 4768 peer = None
4765 4769 else:
4766 4770 ui.write(_(b'creating ssh peer from handshake results\n'))
4767 4771 peer = sshpeer.makepeer(
4768 4772 ui,
4769 4773 url,
4770 4774 proc,
4771 4775 stdin,
4772 4776 stdout,
4773 4777 stderr,
4774 4778 autoreadstderr=autoreadstderr,
4775 4779 )
4776 4780
4777 4781 elif path:
4778 4782 # We bypass hg.peer() so we can proxy the sockets.
4779 4783 # TODO consider not doing this because we skip
4780 4784 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4781 4785 u = urlutil.url(path)
4782 4786 if u.scheme != b'http':
4783 4787 raise error.Abort(_(b'only http:// paths are currently supported'))
4784 4788
4785 4789 url, authinfo = u.authinfo()
4786 4790 openerargs = {
4787 4791 'useragent': b'Mercurial debugwireproto',
4788 4792 }
4789 4793
4790 4794 # Turn pipes/sockets into observers so we can log I/O.
4791 4795 if ui.verbose:
4792 4796 openerargs.update(
4793 4797 {
4794 4798 'loggingfh': ui,
4795 4799 'loggingname': b's',
4796 4800 'loggingopts': {
4797 4801 'logdata': True,
4798 4802 'logdataapis': False,
4799 4803 },
4800 4804 }
4801 4805 )
4802 4806
4803 4807 if ui.debugflag:
4804 4808 openerargs['loggingopts']['logdataapis'] = True
4805 4809
4806 4810 # Don't send default headers when in raw mode. This allows us to
4807 4811 # bypass most of the behavior of our URL handling code so we can
4808 4812 # have near complete control over what's sent on the wire.
4809 4813 if opts[b'peer'] == b'raw':
4810 4814 openerargs['sendaccept'] = False
4811 4815
4812 4816 opener = urlmod.opener(ui, authinfo, **openerargs)
4813 4817
4814 4818 if opts[b'peer'] == b'raw':
4815 4819 ui.write(_(b'using raw connection to peer\n'))
4816 4820 peer = None
4817 4821 elif opts[b'peer']:
4818 4822 raise error.Abort(
4819 4823 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4820 4824 )
4821 4825 else:
4822 4826 peer = httppeer.makepeer(ui, path, opener=opener)
4823 4827
4824 4828 # We /could/ populate stdin/stdout with sock.makefile()...
4825 4829 else:
4826 4830 raise error.Abort(_(b'unsupported connection configuration'))
4827 4831
4828 4832 batchedcommands = None
4829 4833
4830 4834 # Now perform actions based on the parsed wire language instructions.
4831 4835 for action, lines in blocks:
4832 4836 if action in (b'raw', b'raw+'):
4833 4837 if not stdin:
4834 4838 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4835 4839
4836 4840 # Concatenate the data together.
4837 4841 data = b''.join(l.lstrip() for l in lines)
4838 4842 data = stringutil.unescapestr(data)
4839 4843 stdin.write(data)
4840 4844
4841 4845 if action == b'raw+':
4842 4846 stdin.flush()
4843 4847 elif action == b'flush':
4844 4848 if not stdin:
4845 4849 raise error.Abort(_(b'cannot call flush on this peer'))
4846 4850 stdin.flush()
4847 4851 elif action.startswith(b'command'):
4848 4852 if not peer:
4849 4853 raise error.Abort(
4850 4854 _(
4851 4855 b'cannot send commands unless peer instance '
4852 4856 b'is available'
4853 4857 )
4854 4858 )
4855 4859
4856 4860 command = action.split(b' ', 1)[1]
4857 4861
4858 4862 args = {}
4859 4863 for line in lines:
4860 4864 # We need to allow empty values.
4861 4865 fields = line.lstrip().split(b' ', 1)
4862 4866 if len(fields) == 1:
4863 4867 key = fields[0]
4864 4868 value = b''
4865 4869 else:
4866 4870 key, value = fields
4867 4871
4868 4872 if value.startswith(b'eval:'):
4869 4873 value = stringutil.evalpythonliteral(value[5:])
4870 4874 else:
4871 4875 value = stringutil.unescapestr(value)
4872 4876
4873 4877 args[key] = value
4874 4878
4875 4879 if batchedcommands is not None:
4876 4880 batchedcommands.append((command, args))
4877 4881 continue
4878 4882
4879 4883 ui.status(_(b'sending %s command\n') % command)
4880 4884
4881 4885 if b'PUSHFILE' in args:
4882 4886 with open(args[b'PUSHFILE'], 'rb') as fh:
4883 4887 del args[b'PUSHFILE']
4884 4888 res, output = peer._callpush(
4885 4889 command, fh, **pycompat.strkwargs(args)
4886 4890 )
4887 4891 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4888 4892 ui.status(
4889 4893 _(b'remote output: %s\n') % stringutil.escapestr(output)
4890 4894 )
4891 4895 else:
4892 4896 with peer.commandexecutor() as e:
4893 4897 res = e.callcommand(command, args).result()
4894 4898
4895 4899 ui.status(
4896 4900 _(b'response: %s\n')
4897 4901 % stringutil.pprint(res, bprefix=True, indent=2)
4898 4902 )
4899 4903
4900 4904 elif action == b'batchbegin':
4901 4905 if batchedcommands is not None:
4902 4906 raise error.Abort(_(b'nested batchbegin not allowed'))
4903 4907
4904 4908 batchedcommands = []
4905 4909 elif action == b'batchsubmit':
4906 4910 # There is a batching API we could go through. But it would be
4907 4911 # difficult to normalize requests into function calls. It is easier
4908 4912 # to bypass this layer and normalize to commands + args.
4909 4913 ui.status(
4910 4914 _(b'sending batch with %d sub-commands\n')
4911 4915 % len(batchedcommands)
4912 4916 )
4913 4917 assert peer is not None
4914 4918 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4915 4919 ui.status(
4916 4920 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4917 4921 )
4918 4922
4919 4923 batchedcommands = None
4920 4924
4921 4925 elif action.startswith(b'httprequest '):
4922 4926 if not opener:
4923 4927 raise error.Abort(
4924 4928 _(b'cannot use httprequest without an HTTP peer')
4925 4929 )
4926 4930
4927 4931 request = action.split(b' ', 2)
4928 4932 if len(request) != 3:
4929 4933 raise error.Abort(
4930 4934 _(
4931 4935 b'invalid httprequest: expected format is '
4932 4936 b'"httprequest <method> <path>'
4933 4937 )
4934 4938 )
4935 4939
4936 4940 method, httppath = request[1:]
4937 4941 headers = {}
4938 4942 body = None
4939 4943 frames = []
4940 4944 for line in lines:
4941 4945 line = line.lstrip()
4942 4946 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4943 4947 if m:
4944 4948 # Headers need to use native strings.
4945 4949 key = pycompat.strurl(m.group(1))
4946 4950 value = pycompat.strurl(m.group(2))
4947 4951 headers[key] = value
4948 4952 continue
4949 4953
4950 4954 if line.startswith(b'BODYFILE '):
4951 4955 with open(line.split(b' ', 1), b'rb') as fh:
4952 4956 body = fh.read()
4953 4957 elif line.startswith(b'frame '):
4954 4958 frame = wireprotoframing.makeframefromhumanstring(
4955 4959 line[len(b'frame ') :]
4956 4960 )
4957 4961
4958 4962 frames.append(frame)
4959 4963 else:
4960 4964 raise error.Abort(
4961 4965 _(b'unknown argument to httprequest: %s') % line
4962 4966 )
4963 4967
4964 4968 url = path + httppath
4965 4969
4966 4970 if frames:
4967 4971 body = b''.join(bytes(f) for f in frames)
4968 4972
4969 4973 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4970 4974
4971 4975 # urllib.Request insists on using has_data() as a proxy for
4972 4976 # determining the request method. Override that to use our
4973 4977 # explicitly requested method.
4974 4978 req.get_method = lambda: pycompat.sysstr(method)
4975 4979
4976 4980 try:
4977 4981 res = opener.open(req)
4978 4982 body = res.read()
4979 4983 except util.urlerr.urlerror as e:
4980 4984 # read() method must be called, but only exists in Python 2
4981 4985 getattr(e, 'read', lambda: None)()
4982 4986 continue
4983 4987
4984 4988 ct = res.headers.get('Content-Type')
4985 4989 if ct == 'application/mercurial-cbor':
4986 4990 ui.write(
4987 4991 _(b'cbor> %s\n')
4988 4992 % stringutil.pprint(
4989 4993 cborutil.decodeall(body), bprefix=True, indent=2
4990 4994 )
4991 4995 )
4992 4996
4993 4997 elif action == b'close':
4994 4998 assert peer is not None
4995 4999 peer.close()
4996 5000 elif action == b'readavailable':
4997 5001 if not stdout or not stderr:
4998 5002 raise error.Abort(
4999 5003 _(b'readavailable not available on this peer')
5000 5004 )
5001 5005
5002 5006 stdin.close()
5003 5007 stdout.read()
5004 5008 stderr.read()
5005 5009
5006 5010 elif action == b'readline':
5007 5011 if not stdout:
5008 5012 raise error.Abort(_(b'readline not available on this peer'))
5009 5013 stdout.readline()
5010 5014 elif action == b'ereadline':
5011 5015 if not stderr:
5012 5016 raise error.Abort(_(b'ereadline not available on this peer'))
5013 5017 stderr.readline()
5014 5018 elif action.startswith(b'read '):
5015 5019 count = int(action.split(b' ', 1)[1])
5016 5020 if not stdout:
5017 5021 raise error.Abort(_(b'read not available on this peer'))
5018 5022 stdout.read(count)
5019 5023 elif action.startswith(b'eread '):
5020 5024 count = int(action.split(b' ', 1)[1])
5021 5025 if not stderr:
5022 5026 raise error.Abort(_(b'eread not available on this peer'))
5023 5027 stderr.read(count)
5024 5028 else:
5025 5029 raise error.Abort(_(b'unknown action: %s') % action)
5026 5030
5027 5031 if batchedcommands is not None:
5028 5032 raise error.Abort(_(b'unclosed "batchbegin" request'))
5029 5033
5030 5034 if peer:
5031 5035 peer.close()
5032 5036
5033 5037 if proc:
5034 5038 proc.kill()
@@ -1,193 +1,198 b''
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import collections
10 10
11 11 from .i18n import _
12 12 from .node import short
13 13 from . import (
14 14 error,
15 15 )
16 16
17 17
def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
    """Return a tuple (common, fetch, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "fetch" is a list of roots of the nodes that would be incoming, to be
    supplied to changegroupsubset.
    "heads" is either the supplied heads, or else the remote's heads.

    If ``audit`` is not None, it must be a dict; this function records
    query statistics into it under the keys ``total-roundtrips``,
    ``total-queries``, ``total-queries-branches`` and
    ``total-queries-between`` (all bytes keys).
    """

    # knownnode tells us whether a node already exists in the local repo.
    knownnode = repo.changelog.hasnode
    search = []  # (head, root) pairs still to be narrowed by binary search
    fetch = set()  # roots of incoming changesets discovered so far
    seen = set()  # branch heads already examined
    seenbranch = set()  # full branch tuples already scheduled for scanning
    base = set()  # known-common nodes (the "common" result)

    if not heads:
        # No heads supplied: ask the remote for its heads (1 round-trip).
        with remote.commandexecutor() as e:
            heads = e.callcommand(b'heads', {}).result()

    if audit is not None:
        audit[b'total-roundtrips'] = 1
        audit[b'total-queries'] = 0
        # Per-command breakdown of the query total.
        audit[b'total-queries-branches'] = 0
        audit[b'total-queries-between'] = 0

    if repo.changelog.tip() == repo.nullid:
        # Local repo is empty: everything on the remote is incoming.
        base.add(repo.nullid)
        if heads != [repo.nullid]:
            return [repo.nullid], [repo.nullid], list(heads)
        return [repo.nullid], [], heads

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_(b"searching for changes\n"))

    # Split the remote heads into locally-unknown ones (to investigate)
    # and locally-known ones (immediately common).
    unknown = []
    for h in heads:
        if not knownnode(h):
            unknown.append(h)
        else:
            base.add(h)

    if not unknown:
        # Every remote head is already known locally: nothing to fetch.
        return list(base), [], list(heads)

    req = set(unknown)  # nodes already queried, to avoid duplicate requests
    reqcnt = 0  # number of round-trips performed (reported via audit)
    progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    with remote.commandexecutor() as e:
        if audit is not None:
            audit[b'total-queries'] += len(unknown)
            audit[b'total-queries-branches'] += len(unknown)
        branches = e.callcommand(b'branches', {b'nodes': unknown}).result()

    # Process branches breadth-first; deque gives O(1) popleft.
    unknown = collections.deque(branches)
    while unknown:
        r = []  # parent nodes needing a follow-up 'branches' query
        while unknown:
            n = unknown.popleft()
            if n[0] in seen:
                continue

            repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
            if n[0] == repo.nullid:  # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug(b"branch already found\n")
                continue
            elif n[1] and knownnode(n[1]):  # do we know the base?
                repo.ui.debug(
                    b"found incomplete branch %s:%s\n"
                    % (short(n[0]), short(n[1]))
                )
                search.append(n[0:2])  # schedule branch range for scanning
                seenbranch.add(n)
            else:
                # Entire branch is unknown; check whether its parents mark
                # the boundary between known and unknown history.
                if n[1] not in seen and n[1] not in fetch:
                    if knownnode(n[2]) and knownnode(n[3]):
                        repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
                        fetch.add(n[1])  # earliest unknown
                    for p in n[2:4]:
                        if knownnode(p):
                            base.add(p)  # latest known

                for p in n[2:4]:
                    if p not in req and not knownnode(p):
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            progress.increment()
            repo.ui.debug(
                b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
            )
            # Query parents in batches of 10 to bound request size.
            for p in range(0, len(r), 10):
                with remote.commandexecutor() as e:
                    subset = r[p : p + 10]
                    if audit is not None:
                        audit[b'total-queries'] += len(subset)
                        audit[b'total-queries-branches'] += len(subset)
                    branches = e.callcommand(
                        b'branches',
                        {
                            b'nodes': subset,
                        },
                    ).result()

                for b in branches:
                    repo.ui.debug(
                        b"received %s:%s\n" % (short(b[0]), short(b[1]))
                    )
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        progress.increment()

        with remote.commandexecutor() as e:
            if audit is not None:
                audit[b'total-queries'] += len(search)
                audit[b'total-queries-between'] += len(search)
            between = e.callcommand(b'between', {b'pairs': search}).result()

        for n, l in zip(search, between):
            # 'between' returns exponentially-spaced samples between head
            # and root; append the root so the scan always terminates on it.
            l.append(n[1])
            p = n[0]
            f = 1  # distance (in revs) covered by the current step
            for i in l:
                repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if knownnode(i):
                    if f <= 2:
                        # Gap small enough to identify the boundary exactly.
                        repo.ui.debug(
                            b"found new branch changeset %s\n" % short(p)
                        )
                        fetch.add(p)
                        base.add(i)
                    else:
                        # Still too coarse: search the (p, i) interval next
                        # round.
                        repo.ui.debug(
                            b"narrowed branch search to %s:%s\n"
                            % (short(p), short(i))
                        )
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if knownnode(f):
            raise error.RepoError(_(b"already have changeset ") + short(f[:4]))

    base = list(base)
    if base == [repo.nullid]:
        # Only the null revision in common: the repositories share no
        # history at all.
        if force:
            repo.ui.warn(_(b"warning: repository is unrelated\n"))
        else:
            raise error.Abort(_(b"repository is unrelated"))

    repo.ui.debug(
        b"found new changesets starting at "
        + b" ".join([short(f) for f in fetch])
        + b"\n"
    )

    progress.complete()
    repo.ui.debug(b"%d total queries\n" % reqcnt)
    if audit is not None:
        # Overwrite the initial value with the real round-trip count.
        audit[b'total-roundtrips'] = reqcnt

    return base, list(fetch), heads
@@ -1,1806 +1,1830 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 46 elapsed time: * seconds (glob)
47 47 round-trips: 2
48 48 queries: 6
49 queries-branches: 2
50 queries-between: 4
49 51 heads summary:
50 52 total common heads: 2
51 53 also local heads: 2
52 54 also remote heads: 1
53 55 both: 1
54 56 local heads: 2
55 57 common: 2
56 58 missing: 0
57 59 remote heads: 3
58 60 common: 1
59 61 unknown: 2
60 62 local changesets: 7
61 63 common: 7
62 64 heads: 2
63 65 roots: 1
64 66 missing: 0
65 67 heads: 0
66 68 roots: 0
67 69 first undecided set: 3
68 70 heads: 1
69 71 roots: 1
70 72 common: 3
71 73 missing: 0
72 74 common heads: 01241442b3c2 b5714e113bc0
73 75
74 76 % -- a -> b set
75 77 comparing with b
76 78 query 1; heads
77 79 searching for changes
78 80 all local changesets known remotely
79 81 elapsed time: * seconds (glob)
80 82 round-trips: 1
81 83 queries: 2
82 84 heads summary:
83 85 total common heads: 2
84 86 also local heads: 2
85 87 also remote heads: 1
86 88 both: 1
87 89 local heads: 2
88 90 common: 2
89 91 missing: 0
90 92 remote heads: 3
91 93 common: 1
92 94 unknown: 2
93 95 local changesets: 7
94 96 common: 7
95 97 heads: 2
96 98 roots: 1
97 99 missing: 0
98 100 heads: 0
99 101 roots: 0
100 102 first undecided set: 3
101 103 heads: 1
102 104 roots: 1
103 105 common: 3
104 106 missing: 0
105 107 common heads: 01241442b3c2 b5714e113bc0
106 108
107 109 % -- a -> b set (tip only)
108 110 comparing with b
109 111 query 1; heads
110 112 searching for changes
111 113 all local changesets known remotely
112 114 elapsed time: * seconds (glob)
113 115 round-trips: 1
114 116 queries: 1
115 117 heads summary:
116 118 total common heads: 1
117 119 also local heads: 1
118 120 also remote heads: 0
119 121 both: 0
120 122 local heads: 2
121 123 common: 1
122 124 missing: 1
123 125 remote heads: 3
124 126 common: 0
125 127 unknown: 3
126 128 local changesets: 7
127 129 common: 6
128 130 heads: 1
129 131 roots: 1
130 132 missing: 1
131 133 heads: 1
132 134 roots: 1
133 135 first undecided set: 6
134 136 heads: 2
135 137 roots: 1
136 138 common: 5
137 139 missing: 1
138 140 common heads: b5714e113bc0
139 141
140 142 % -- b -> a tree
141 143 comparing with a
142 144 searching for changes
143 145 unpruned common: 01241442b3c2 b5714e113bc0
144 146 elapsed time: * seconds (glob)
145 147 round-trips: 1
146 148 queries: 0
149 queries-branches: 0
150 queries-between: 0
147 151 heads summary:
148 152 total common heads: 2
149 153 also local heads: 1
150 154 also remote heads: 2
151 155 both: 1
152 156 local heads: 3
153 157 common: 1
154 158 missing: 2
155 159 remote heads: 2
156 160 common: 2
157 161 unknown: 0
158 162 local changesets: 15
159 163 common: 7
160 164 heads: 2
161 165 roots: 1
162 166 missing: 8
163 167 heads: 2
164 168 roots: 2
165 169 first undecided set: 8
166 170 heads: 2
167 171 roots: 2
168 172 common: 0
169 173 missing: 8
170 174 common heads: 01241442b3c2 b5714e113bc0
171 175
172 176 % -- b -> a set
173 177 comparing with a
174 178 query 1; heads
175 179 searching for changes
176 180 all remote heads known locally
177 181 elapsed time: * seconds (glob)
178 182 round-trips: 1
179 183 queries: 3
180 184 heads summary:
181 185 total common heads: 2
182 186 also local heads: 1
183 187 also remote heads: 2
184 188 both: 1
185 189 local heads: 3
186 190 common: 1
187 191 missing: 2
188 192 remote heads: 2
189 193 common: 2
190 194 unknown: 0
191 195 local changesets: 15
192 196 common: 7
193 197 heads: 2
194 198 roots: 1
195 199 missing: 8
196 200 heads: 2
197 201 roots: 2
198 202 first undecided set: 8
199 203 heads: 2
200 204 roots: 2
201 205 common: 0
202 206 missing: 8
203 207 common heads: 01241442b3c2 b5714e113bc0
204 208
205 209 % -- b -> a set (tip only)
206 210 comparing with a
207 211 query 1; heads
208 212 searching for changes
209 213 all remote heads known locally
210 214 elapsed time: * seconds (glob)
211 215 round-trips: 1
212 216 queries: 1
213 217 heads summary:
214 218 total common heads: 2
215 219 also local heads: 1
216 220 also remote heads: 2
217 221 both: 1
218 222 local heads: 3
219 223 common: 1
220 224 missing: 2
221 225 remote heads: 2
222 226 common: 2
223 227 unknown: 0
224 228 local changesets: 15
225 229 common: 7
226 230 heads: 2
227 231 roots: 1
228 232 missing: 8
229 233 heads: 2
230 234 roots: 2
231 235 first undecided set: 8
232 236 heads: 2
233 237 roots: 2
234 238 common: 0
235 239 missing: 8
236 240 common heads: 01241442b3c2 b5714e113bc0
237 241
238 242
239 243 Many new:
240 244
241 245 $ testdesc '-ra1 -ra2' '-rb' '
242 246 > +2:f +3:a1 +3:b
243 247 > <f +30 :a2'
244 248
245 249 % -- a -> b tree
246 250 comparing with b
247 251 searching for changes
248 252 unpruned common: bebd167eb94d
249 253 elapsed time: * seconds (glob)
250 254 round-trips: 2
251 255 queries: 3
256 queries-branches: 1
257 queries-between: 2
252 258 heads summary:
253 259 total common heads: 1
254 260 also local heads: 1
255 261 also remote heads: 0
256 262 both: 0
257 263 local heads: 2
258 264 common: 1
259 265 missing: 1
260 266 remote heads: 1
261 267 common: 0
262 268 unknown: 1
263 269 local changesets: 35
264 270 common: 5
265 271 heads: 1
266 272 roots: 1
267 273 missing: 30
268 274 heads: 1
269 275 roots: 1
270 276 first undecided set: 34
271 277 heads: 2
272 278 roots: 1
273 279 common: 4
274 280 missing: 30
275 281 common heads: bebd167eb94d
276 282
277 283 % -- a -> b set
278 284 comparing with b
279 285 query 1; heads
280 286 searching for changes
281 287 taking initial sample
282 288 searching: 2 queries
283 289 query 2; still undecided: 29, sample size is: 29
284 290 2 total queries in *.????s (glob)
285 291 elapsed time: * seconds (glob)
286 292 round-trips: 2
287 293 queries: 31
288 294 heads summary:
289 295 total common heads: 1
290 296 also local heads: 1
291 297 also remote heads: 0
292 298 both: 0
293 299 local heads: 2
294 300 common: 1
295 301 missing: 1
296 302 remote heads: 1
297 303 common: 0
298 304 unknown: 1
299 305 local changesets: 35
300 306 common: 5
301 307 heads: 1
302 308 roots: 1
303 309 missing: 30
304 310 heads: 1
305 311 roots: 1
306 312 first undecided set: 34
307 313 heads: 2
308 314 roots: 1
309 315 common: 4
310 316 missing: 30
311 317 common heads: bebd167eb94d
312 318
313 319 % -- a -> b set (tip only)
314 320 comparing with b
315 321 query 1; heads
316 322 searching for changes
317 323 taking quick initial sample
318 324 searching: 2 queries
319 325 query 2; still undecided: 31, sample size is: 31
320 326 2 total queries in *.????s (glob)
321 327 elapsed time: * seconds (glob)
322 328 round-trips: 2
323 329 queries: 32
324 330 heads summary:
325 331 total common heads: 1
326 332 also local heads: 0
327 333 also remote heads: 0
328 334 both: 0
329 335 local heads: 2
330 336 common: 0
331 337 missing: 2
332 338 remote heads: 1
333 339 common: 0
334 340 unknown: 1
335 341 local changesets: 35
336 342 common: 2
337 343 heads: 1
338 344 roots: 1
339 345 missing: 33
340 346 heads: 2
341 347 roots: 2
342 348 first undecided set: 35
343 349 heads: 2
344 350 roots: 1
345 351 common: 2
346 352 missing: 33
347 353 common heads: 66f7d451a68b
348 354
349 355 % -- b -> a tree
350 356 comparing with a
351 357 searching for changes
352 358 unpruned common: 66f7d451a68b bebd167eb94d
353 359 elapsed time: * seconds (glob)
354 360 round-trips: 4
355 361 queries: 5
362 queries-branches: 1
363 queries-between: 4
356 364 heads summary:
357 365 total common heads: 1
358 366 also local heads: 0
359 367 also remote heads: 1
360 368 both: 0
361 369 local heads: 1
362 370 common: 0
363 371 missing: 1
364 372 remote heads: 2
365 373 common: 1
366 374 unknown: 1
367 375 local changesets: 8
368 376 common: 5
369 377 heads: 1
370 378 roots: 1
371 379 missing: 3
372 380 heads: 1
373 381 roots: 1
374 382 first undecided set: 3
375 383 heads: 1
376 384 roots: 1
377 385 common: 0
378 386 missing: 3
379 387 common heads: bebd167eb94d
380 388
381 389 % -- b -> a set
382 390 comparing with a
383 391 query 1; heads
384 392 searching for changes
385 393 taking initial sample
386 394 searching: 2 queries
387 395 query 2; still undecided: 2, sample size is: 2
388 396 2 total queries in *.????s (glob)
389 397 elapsed time: * seconds (glob)
390 398 round-trips: 2
391 399 queries: 3
392 400 heads summary:
393 401 total common heads: 1
394 402 also local heads: 0
395 403 also remote heads: 1
396 404 both: 0
397 405 local heads: 1
398 406 common: 0
399 407 missing: 1
400 408 remote heads: 2
401 409 common: 1
402 410 unknown: 1
403 411 local changesets: 8
404 412 common: 5
405 413 heads: 1
406 414 roots: 1
407 415 missing: 3
408 416 heads: 1
409 417 roots: 1
410 418 first undecided set: 3
411 419 heads: 1
412 420 roots: 1
413 421 common: 0
414 422 missing: 3
415 423 common heads: bebd167eb94d
416 424
417 425 % -- b -> a set (tip only)
418 426 comparing with a
419 427 query 1; heads
420 428 searching for changes
421 429 taking initial sample
422 430 searching: 2 queries
423 431 query 2; still undecided: 2, sample size is: 2
424 432 2 total queries in *.????s (glob)
425 433 elapsed time: * seconds (glob)
426 434 round-trips: 2
427 435 queries: 3
428 436 heads summary:
429 437 total common heads: 1
430 438 also local heads: 0
431 439 also remote heads: 1
432 440 both: 0
433 441 local heads: 1
434 442 common: 0
435 443 missing: 1
436 444 remote heads: 2
437 445 common: 1
438 446 unknown: 1
439 447 local changesets: 8
440 448 common: 5
441 449 heads: 1
442 450 roots: 1
443 451 missing: 3
444 452 heads: 1
445 453 roots: 1
446 454 first undecided set: 3
447 455 heads: 1
448 456 roots: 1
449 457 common: 0
450 458 missing: 3
451 459 common heads: bebd167eb94d
452 460
453 461 Both sides many new with stub:
454 462
455 463 $ testdesc '-ra1 -ra2' '-rb' '
456 464 > +2:f +2:a1 +30 :b
457 465 > <f +30 :a2'
458 466
459 467 % -- a -> b tree
460 468 comparing with b
461 469 searching for changes
462 470 unpruned common: 2dc09a01254d
463 471 elapsed time: * seconds (glob)
464 472 round-trips: 4
465 473 queries: 5
474 queries-branches: 1
475 queries-between: 4
466 476 heads summary:
467 477 total common heads: 1
468 478 also local heads: 1
469 479 also remote heads: 0
470 480 both: 0
471 481 local heads: 2
472 482 common: 1
473 483 missing: 1
474 484 remote heads: 1
475 485 common: 0
476 486 unknown: 1
477 487 local changesets: 34
478 488 common: 4
479 489 heads: 1
480 490 roots: 1
481 491 missing: 30
482 492 heads: 1
483 493 roots: 1
484 494 first undecided set: 33
485 495 heads: 2
486 496 roots: 1
487 497 common: 3
488 498 missing: 30
489 499 common heads: 2dc09a01254d
490 500
491 501 % -- a -> b set
492 502 comparing with b
493 503 query 1; heads
494 504 searching for changes
495 505 taking initial sample
496 506 searching: 2 queries
497 507 query 2; still undecided: 29, sample size is: 29
498 508 2 total queries in *.????s (glob)
499 509 elapsed time: * seconds (glob)
500 510 round-trips: 2
501 511 queries: 31
502 512 heads summary:
503 513 total common heads: 1
504 514 also local heads: 1
505 515 also remote heads: 0
506 516 both: 0
507 517 local heads: 2
508 518 common: 1
509 519 missing: 1
510 520 remote heads: 1
511 521 common: 0
512 522 unknown: 1
513 523 local changesets: 34
514 524 common: 4
515 525 heads: 1
516 526 roots: 1
517 527 missing: 30
518 528 heads: 1
519 529 roots: 1
520 530 first undecided set: 33
521 531 heads: 2
522 532 roots: 1
523 533 common: 3
524 534 missing: 30
525 535 common heads: 2dc09a01254d
526 536
527 537 % -- a -> b set (tip only)
528 538 comparing with b
529 539 query 1; heads
530 540 searching for changes
531 541 taking quick initial sample
532 542 searching: 2 queries
533 543 query 2; still undecided: 31, sample size is: 31
534 544 2 total queries in *.????s (glob)
535 545 elapsed time: * seconds (glob)
536 546 round-trips: 2
537 547 queries: 32
538 548 heads summary:
539 549 total common heads: 1
540 550 also local heads: 0
541 551 also remote heads: 0
542 552 both: 0
543 553 local heads: 2
544 554 common: 0
545 555 missing: 2
546 556 remote heads: 1
547 557 common: 0
548 558 unknown: 1
549 559 local changesets: 34
550 560 common: 2
551 561 heads: 1
552 562 roots: 1
553 563 missing: 32
554 564 heads: 2
555 565 roots: 2
556 566 first undecided set: 34
557 567 heads: 2
558 568 roots: 1
559 569 common: 2
560 570 missing: 32
561 571 common heads: 66f7d451a68b
562 572
563 573 % -- b -> a tree
564 574 comparing with a
565 575 searching for changes
566 576 unpruned common: 2dc09a01254d 66f7d451a68b
567 577 elapsed time: * seconds (glob)
568 578 round-trips: 4
569 579 queries: 5
580 queries-branches: 1
581 queries-between: 4
570 582 heads summary:
571 583 total common heads: 1
572 584 also local heads: 0
573 585 also remote heads: 1
574 586 both: 0
575 587 local heads: 1
576 588 common: 0
577 589 missing: 1
578 590 remote heads: 2
579 591 common: 1
580 592 unknown: 1
581 593 local changesets: 34
582 594 common: 4
583 595 heads: 1
584 596 roots: 1
585 597 missing: 30
586 598 heads: 1
587 599 roots: 1
588 600 first undecided set: 30
589 601 heads: 1
590 602 roots: 1
591 603 common: 0
592 604 missing: 30
593 605 common heads: 2dc09a01254d
594 606
595 607 % -- b -> a set
596 608 comparing with a
597 609 query 1; heads
598 610 searching for changes
599 611 taking initial sample
600 612 searching: 2 queries
601 613 query 2; still undecided: 29, sample size is: 29
602 614 2 total queries in *.????s (glob)
603 615 elapsed time: * seconds (glob)
604 616 round-trips: 2
605 617 queries: 30
606 618 heads summary:
607 619 total common heads: 1
608 620 also local heads: 0
609 621 also remote heads: 1
610 622 both: 0
611 623 local heads: 1
612 624 common: 0
613 625 missing: 1
614 626 remote heads: 2
615 627 common: 1
616 628 unknown: 1
617 629 local changesets: 34
618 630 common: 4
619 631 heads: 1
620 632 roots: 1
621 633 missing: 30
622 634 heads: 1
623 635 roots: 1
624 636 first undecided set: 30
625 637 heads: 1
626 638 roots: 1
627 639 common: 0
628 640 missing: 30
629 641 common heads: 2dc09a01254d
630 642
631 643 % -- b -> a set (tip only)
632 644 comparing with a
633 645 query 1; heads
634 646 searching for changes
635 647 taking initial sample
636 648 searching: 2 queries
637 649 query 2; still undecided: 29, sample size is: 29
638 650 2 total queries in *.????s (glob)
639 651 elapsed time: * seconds (glob)
640 652 round-trips: 2
641 653 queries: 30
642 654 heads summary:
643 655 total common heads: 1
644 656 also local heads: 0
645 657 also remote heads: 1
646 658 both: 0
647 659 local heads: 1
648 660 common: 0
649 661 missing: 1
650 662 remote heads: 2
651 663 common: 1
652 664 unknown: 1
653 665 local changesets: 34
654 666 common: 4
655 667 heads: 1
656 668 roots: 1
657 669 missing: 30
658 670 heads: 1
659 671 roots: 1
660 672 first undecided set: 30
661 673 heads: 1
662 674 roots: 1
663 675 common: 0
664 676 missing: 30
665 677 common heads: 2dc09a01254d
666 678
667 679
668 680 Both many new:
669 681
670 682 $ testdesc '-ra' '-rb' '
671 683 > +2:f +30 :b
672 684 > <f +30 :a'
673 685
674 686 % -- a -> b tree
675 687 comparing with b
676 688 searching for changes
677 689 unpruned common: 66f7d451a68b
678 690 elapsed time: * seconds (glob)
679 691 round-trips: 4
680 692 queries: 5
693 queries-branches: 1
694 queries-between: 4
681 695 heads summary:
682 696 total common heads: 1
683 697 also local heads: 0
684 698 also remote heads: 0
685 699 both: 0
686 700 local heads: 1
687 701 common: 0
688 702 missing: 1
689 703 remote heads: 1
690 704 common: 0
691 705 unknown: 1
692 706 local changesets: 32
693 707 common: 2
694 708 heads: 1
695 709 roots: 1
696 710 missing: 30
697 711 heads: 1
698 712 roots: 1
699 713 first undecided set: 32
700 714 heads: 1
701 715 roots: 1
702 716 common: 2
703 717 missing: 30
704 718 common heads: 66f7d451a68b
705 719
706 720 % -- a -> b set
707 721 comparing with b
708 722 query 1; heads
709 723 searching for changes
710 724 taking quick initial sample
711 725 searching: 2 queries
712 726 query 2; still undecided: 31, sample size is: 31
713 727 2 total queries in *.????s (glob)
714 728 elapsed time: * seconds (glob)
715 729 round-trips: 2
716 730 queries: 32
717 731 heads summary:
718 732 total common heads: 1
719 733 also local heads: 0
720 734 also remote heads: 0
721 735 both: 0
722 736 local heads: 1
723 737 common: 0
724 738 missing: 1
725 739 remote heads: 1
726 740 common: 0
727 741 unknown: 1
728 742 local changesets: 32
729 743 common: 2
730 744 heads: 1
731 745 roots: 1
732 746 missing: 30
733 747 heads: 1
734 748 roots: 1
735 749 first undecided set: 32
736 750 heads: 1
737 751 roots: 1
738 752 common: 2
739 753 missing: 30
740 754 common heads: 66f7d451a68b
741 755
742 756 % -- a -> b set (tip only)
743 757 comparing with b
744 758 query 1; heads
745 759 searching for changes
746 760 taking quick initial sample
747 761 searching: 2 queries
748 762 query 2; still undecided: 31, sample size is: 31
749 763 2 total queries in *.????s (glob)
750 764 elapsed time: * seconds (glob)
751 765 round-trips: 2
752 766 queries: 32
753 767 heads summary:
754 768 total common heads: 1
755 769 also local heads: 0
756 770 also remote heads: 0
757 771 both: 0
758 772 local heads: 1
759 773 common: 0
760 774 missing: 1
761 775 remote heads: 1
762 776 common: 0
763 777 unknown: 1
764 778 local changesets: 32
765 779 common: 2
766 780 heads: 1
767 781 roots: 1
768 782 missing: 30
769 783 heads: 1
770 784 roots: 1
771 785 first undecided set: 32
772 786 heads: 1
773 787 roots: 1
774 788 common: 2
775 789 missing: 30
776 790 common heads: 66f7d451a68b
777 791
778 792 % -- b -> a tree
779 793 comparing with a
780 794 searching for changes
781 795 unpruned common: 66f7d451a68b
782 796 elapsed time: * seconds (glob)
783 797 round-trips: 4
784 798 queries: 5
799 queries-branches: 1
800 queries-between: 4
785 801 heads summary:
786 802 total common heads: 1
787 803 also local heads: 0
788 804 also remote heads: 0
789 805 both: 0
790 806 local heads: 1
791 807 common: 0
792 808 missing: 1
793 809 remote heads: 1
794 810 common: 0
795 811 unknown: 1
796 812 local changesets: 32
797 813 common: 2
798 814 heads: 1
799 815 roots: 1
800 816 missing: 30
801 817 heads: 1
802 818 roots: 1
803 819 first undecided set: 32
804 820 heads: 1
805 821 roots: 1
806 822 common: 2
807 823 missing: 30
808 824 common heads: 66f7d451a68b
809 825
810 826 % -- b -> a set
811 827 comparing with a
812 828 query 1; heads
813 829 searching for changes
814 830 taking quick initial sample
815 831 searching: 2 queries
816 832 query 2; still undecided: 31, sample size is: 31
817 833 2 total queries in *.????s (glob)
818 834 elapsed time: * seconds (glob)
819 835 round-trips: 2
820 836 queries: 32
821 837 heads summary:
822 838 total common heads: 1
823 839 also local heads: 0
824 840 also remote heads: 0
825 841 both: 0
826 842 local heads: 1
827 843 common: 0
828 844 missing: 1
829 845 remote heads: 1
830 846 common: 0
831 847 unknown: 1
832 848 local changesets: 32
833 849 common: 2
834 850 heads: 1
835 851 roots: 1
836 852 missing: 30
837 853 heads: 1
838 854 roots: 1
839 855 first undecided set: 32
840 856 heads: 1
841 857 roots: 1
842 858 common: 2
843 859 missing: 30
844 860 common heads: 66f7d451a68b
845 861
846 862 % -- b -> a set (tip only)
847 863 comparing with a
848 864 query 1; heads
849 865 searching for changes
850 866 taking quick initial sample
851 867 searching: 2 queries
852 868 query 2; still undecided: 31, sample size is: 31
853 869 2 total queries in *.????s (glob)
854 870 elapsed time: * seconds (glob)
855 871 round-trips: 2
856 872 queries: 32
857 873 heads summary:
858 874 total common heads: 1
859 875 also local heads: 0
860 876 also remote heads: 0
861 877 both: 0
862 878 local heads: 1
863 879 common: 0
864 880 missing: 1
865 881 remote heads: 1
866 882 common: 0
867 883 unknown: 1
868 884 local changesets: 32
869 885 common: 2
870 886 heads: 1
871 887 roots: 1
872 888 missing: 30
873 889 heads: 1
874 890 roots: 1
875 891 first undecided set: 32
876 892 heads: 1
877 893 roots: 1
878 894 common: 2
879 895 missing: 30
880 896 common heads: 66f7d451a68b
881 897
882 898
883 899 Both many new skewed:
884 900
885 901 $ testdesc '-ra' '-rb' '
886 902 > +2:f +30 :b
887 903 > <f +50 :a'
888 904
889 905 % -- a -> b tree
890 906 comparing with b
891 907 searching for changes
892 908 unpruned common: 66f7d451a68b
893 909 elapsed time: * seconds (glob)
894 910 round-trips: 4
895 911 queries: 5
912 queries-branches: 1
913 queries-between: 4
896 914 heads summary:
897 915 total common heads: 1
898 916 also local heads: 0
899 917 also remote heads: 0
900 918 both: 0
901 919 local heads: 1
902 920 common: 0
903 921 missing: 1
904 922 remote heads: 1
905 923 common: 0
906 924 unknown: 1
907 925 local changesets: 52
908 926 common: 2
909 927 heads: 1
910 928 roots: 1
911 929 missing: 50
912 930 heads: 1
913 931 roots: 1
914 932 first undecided set: 52
915 933 heads: 1
916 934 roots: 1
917 935 common: 2
918 936 missing: 50
919 937 common heads: 66f7d451a68b
920 938
921 939 % -- a -> b set
922 940 comparing with b
923 941 query 1; heads
924 942 searching for changes
925 943 taking quick initial sample
926 944 searching: 2 queries
927 945 query 2; still undecided: 51, sample size is: 51
928 946 2 total queries in *.????s (glob)
929 947 elapsed time: * seconds (glob)
930 948 round-trips: 2
931 949 queries: 52
932 950 heads summary:
933 951 total common heads: 1
934 952 also local heads: 0
935 953 also remote heads: 0
936 954 both: 0
937 955 local heads: 1
938 956 common: 0
939 957 missing: 1
940 958 remote heads: 1
941 959 common: 0
942 960 unknown: 1
943 961 local changesets: 52
944 962 common: 2
945 963 heads: 1
946 964 roots: 1
947 965 missing: 50
948 966 heads: 1
949 967 roots: 1
950 968 first undecided set: 52
951 969 heads: 1
952 970 roots: 1
953 971 common: 2
954 972 missing: 50
955 973 common heads: 66f7d451a68b
956 974
957 975 % -- a -> b set (tip only)
958 976 comparing with b
959 977 query 1; heads
960 978 searching for changes
961 979 taking quick initial sample
962 980 searching: 2 queries
963 981 query 2; still undecided: 51, sample size is: 51
964 982 2 total queries in *.????s (glob)
965 983 elapsed time: * seconds (glob)
966 984 round-trips: 2
967 985 queries: 52
968 986 heads summary:
969 987 total common heads: 1
970 988 also local heads: 0
971 989 also remote heads: 0
972 990 both: 0
973 991 local heads: 1
974 992 common: 0
975 993 missing: 1
976 994 remote heads: 1
977 995 common: 0
978 996 unknown: 1
979 997 local changesets: 52
980 998 common: 2
981 999 heads: 1
982 1000 roots: 1
983 1001 missing: 50
984 1002 heads: 1
985 1003 roots: 1
986 1004 first undecided set: 52
987 1005 heads: 1
988 1006 roots: 1
989 1007 common: 2
990 1008 missing: 50
991 1009 common heads: 66f7d451a68b
992 1010
993 1011 % -- b -> a tree
994 1012 comparing with a
995 1013 searching for changes
996 1014 unpruned common: 66f7d451a68b
997 1015 elapsed time: * seconds (glob)
998 1016 round-trips: 3
999 1017 queries: 4
1018 queries-branches: 1
1019 queries-between: 3
1000 1020 heads summary:
1001 1021 total common heads: 1
1002 1022 also local heads: 0
1003 1023 also remote heads: 0
1004 1024 both: 0
1005 1025 local heads: 1
1006 1026 common: 0
1007 1027 missing: 1
1008 1028 remote heads: 1
1009 1029 common: 0
1010 1030 unknown: 1
1011 1031 local changesets: 32
1012 1032 common: 2
1013 1033 heads: 1
1014 1034 roots: 1
1015 1035 missing: 30
1016 1036 heads: 1
1017 1037 roots: 1
1018 1038 first undecided set: 32
1019 1039 heads: 1
1020 1040 roots: 1
1021 1041 common: 2
1022 1042 missing: 30
1023 1043 common heads: 66f7d451a68b
1024 1044
1025 1045 % -- b -> a set
1026 1046 comparing with a
1027 1047 query 1; heads
1028 1048 searching for changes
1029 1049 taking quick initial sample
1030 1050 searching: 2 queries
1031 1051 query 2; still undecided: 31, sample size is: 31
1032 1052 2 total queries in *.????s (glob)
1033 1053 elapsed time: * seconds (glob)
1034 1054 round-trips: 2
1035 1055 queries: 32
1036 1056 heads summary:
1037 1057 total common heads: 1
1038 1058 also local heads: 0
1039 1059 also remote heads: 0
1040 1060 both: 0
1041 1061 local heads: 1
1042 1062 common: 0
1043 1063 missing: 1
1044 1064 remote heads: 1
1045 1065 common: 0
1046 1066 unknown: 1
1047 1067 local changesets: 32
1048 1068 common: 2
1049 1069 heads: 1
1050 1070 roots: 1
1051 1071 missing: 30
1052 1072 heads: 1
1053 1073 roots: 1
1054 1074 first undecided set: 32
1055 1075 heads: 1
1056 1076 roots: 1
1057 1077 common: 2
1058 1078 missing: 30
1059 1079 common heads: 66f7d451a68b
1060 1080
1061 1081 % -- b -> a set (tip only)
1062 1082 comparing with a
1063 1083 query 1; heads
1064 1084 searching for changes
1065 1085 taking quick initial sample
1066 1086 searching: 2 queries
1067 1087 query 2; still undecided: 31, sample size is: 31
1068 1088 2 total queries in *.????s (glob)
1069 1089 elapsed time: * seconds (glob)
1070 1090 round-trips: 2
1071 1091 queries: 32
1072 1092 heads summary:
1073 1093 total common heads: 1
1074 1094 also local heads: 0
1075 1095 also remote heads: 0
1076 1096 both: 0
1077 1097 local heads: 1
1078 1098 common: 0
1079 1099 missing: 1
1080 1100 remote heads: 1
1081 1101 common: 0
1082 1102 unknown: 1
1083 1103 local changesets: 32
1084 1104 common: 2
1085 1105 heads: 1
1086 1106 roots: 1
1087 1107 missing: 30
1088 1108 heads: 1
1089 1109 roots: 1
1090 1110 first undecided set: 32
1091 1111 heads: 1
1092 1112 roots: 1
1093 1113 common: 2
1094 1114 missing: 30
1095 1115 common heads: 66f7d451a68b
1096 1116
1097 1117
1098 1118 Both many new on top of long history:
1099 1119
1100 1120 $ testdesc '-ra' '-rb' '
1101 1121 > +1000:f +30 :b
1102 1122 > <f +50 :a'
1103 1123
1104 1124 % -- a -> b tree
1105 1125 comparing with b
1106 1126 searching for changes
1107 1127 unpruned common: 7ead0cba2838
1108 1128 elapsed time: * seconds (glob)
1109 1129 round-trips: 4
1110 1130 queries: 5
1131 queries-branches: 1
1132 queries-between: 4
1111 1133 heads summary:
1112 1134 total common heads: 1
1113 1135 also local heads: 0
1114 1136 also remote heads: 0
1115 1137 both: 0
1116 1138 local heads: 1
1117 1139 common: 0
1118 1140 missing: 1
1119 1141 remote heads: 1
1120 1142 common: 0
1121 1143 unknown: 1
1122 1144 local changesets: 1050
1123 1145 common: 1000
1124 1146 heads: 1
1125 1147 roots: 1
1126 1148 missing: 50
1127 1149 heads: 1
1128 1150 roots: 1
1129 1151 first undecided set: 1050
1130 1152 heads: 1
1131 1153 roots: 1
1132 1154 common: 1000
1133 1155 missing: 50
1134 1156 common heads: 7ead0cba2838
1135 1157
1136 1158 % -- a -> b set
1137 1159 comparing with b
1138 1160 query 1; heads
1139 1161 searching for changes
1140 1162 taking quick initial sample
1141 1163 searching: 2 queries
1142 1164 query 2; still undecided: 1049, sample size is: 11
1143 1165 sampling from both directions
1144 1166 searching: 3 queries
1145 1167 query 3; still undecided: 31, sample size is: 31
1146 1168 3 total queries in *.????s (glob)
1147 1169 elapsed time: * seconds (glob)
1148 1170 round-trips: 3
1149 1171 queries: 43
1150 1172 heads summary:
1151 1173 total common heads: 1
1152 1174 also local heads: 0
1153 1175 also remote heads: 0
1154 1176 both: 0
1155 1177 local heads: 1
1156 1178 common: 0
1157 1179 missing: 1
1158 1180 remote heads: 1
1159 1181 common: 0
1160 1182 unknown: 1
1161 1183 local changesets: 1050
1162 1184 common: 1000
1163 1185 heads: 1
1164 1186 roots: 1
1165 1187 missing: 50
1166 1188 heads: 1
1167 1189 roots: 1
1168 1190 first undecided set: 1050
1169 1191 heads: 1
1170 1192 roots: 1
1171 1193 common: 1000
1172 1194 missing: 50
1173 1195 common heads: 7ead0cba2838
1174 1196
1175 1197 % -- a -> b set (tip only)
1176 1198 comparing with b
1177 1199 query 1; heads
1178 1200 searching for changes
1179 1201 taking quick initial sample
1180 1202 searching: 2 queries
1181 1203 query 2; still undecided: 1049, sample size is: 11
1182 1204 sampling from both directions
1183 1205 searching: 3 queries
1184 1206 query 3; still undecided: 31, sample size is: 31
1185 1207 3 total queries in *.????s (glob)
1186 1208 elapsed time: * seconds (glob)
1187 1209 round-trips: 3
1188 1210 queries: 43
1189 1211 heads summary:
1190 1212 total common heads: 1
1191 1213 also local heads: 0
1192 1214 also remote heads: 0
1193 1215 both: 0
1194 1216 local heads: 1
1195 1217 common: 0
1196 1218 missing: 1
1197 1219 remote heads: 1
1198 1220 common: 0
1199 1221 unknown: 1
1200 1222 local changesets: 1050
1201 1223 common: 1000
1202 1224 heads: 1
1203 1225 roots: 1
1204 1226 missing: 50
1205 1227 heads: 1
1206 1228 roots: 1
1207 1229 first undecided set: 1050
1208 1230 heads: 1
1209 1231 roots: 1
1210 1232 common: 1000
1211 1233 missing: 50
1212 1234 common heads: 7ead0cba2838
1213 1235
1214 1236 % -- b -> a tree
1215 1237 comparing with a
1216 1238 searching for changes
1217 1239 unpruned common: 7ead0cba2838
1218 1240 elapsed time: * seconds (glob)
1219 1241 round-trips: 3
1220 1242 queries: 4
1243 queries-branches: 1
1244 queries-between: 3
1221 1245 heads summary:
1222 1246 total common heads: 1
1223 1247 also local heads: 0
1224 1248 also remote heads: 0
1225 1249 both: 0
1226 1250 local heads: 1
1227 1251 common: 0
1228 1252 missing: 1
1229 1253 remote heads: 1
1230 1254 common: 0
1231 1255 unknown: 1
1232 1256 local changesets: 1030
1233 1257 common: 1000
1234 1258 heads: 1
1235 1259 roots: 1
1236 1260 missing: 30
1237 1261 heads: 1
1238 1262 roots: 1
1239 1263 first undecided set: 1030
1240 1264 heads: 1
1241 1265 roots: 1
1242 1266 common: 1000
1243 1267 missing: 30
1244 1268 common heads: 7ead0cba2838
1245 1269
1246 1270 % -- b -> a set
1247 1271 comparing with a
1248 1272 query 1; heads
1249 1273 searching for changes
1250 1274 taking quick initial sample
1251 1275 searching: 2 queries
1252 1276 query 2; still undecided: 1029, sample size is: 11
1253 1277 sampling from both directions
1254 1278 searching: 3 queries
1255 1279 query 3; still undecided: 15, sample size is: 15
1256 1280 3 total queries in *.????s (glob)
1257 1281 elapsed time: * seconds (glob)
1258 1282 round-trips: 3
1259 1283 queries: 27
1260 1284 heads summary:
1261 1285 total common heads: 1
1262 1286 also local heads: 0
1263 1287 also remote heads: 0
1264 1288 both: 0
1265 1289 local heads: 1
1266 1290 common: 0
1267 1291 missing: 1
1268 1292 remote heads: 1
1269 1293 common: 0
1270 1294 unknown: 1
1271 1295 local changesets: 1030
1272 1296 common: 1000
1273 1297 heads: 1
1274 1298 roots: 1
1275 1299 missing: 30
1276 1300 heads: 1
1277 1301 roots: 1
1278 1302 first undecided set: 1030
1279 1303 heads: 1
1280 1304 roots: 1
1281 1305 common: 1000
1282 1306 missing: 30
1283 1307 common heads: 7ead0cba2838
1284 1308
1285 1309 % -- b -> a set (tip only)
1286 1310 comparing with a
1287 1311 query 1; heads
1288 1312 searching for changes
1289 1313 taking quick initial sample
1290 1314 searching: 2 queries
1291 1315 query 2; still undecided: 1029, sample size is: 11
1292 1316 sampling from both directions
1293 1317 searching: 3 queries
1294 1318 query 3; still undecided: 15, sample size is: 15
1295 1319 3 total queries in *.????s (glob)
1296 1320 elapsed time: * seconds (glob)
1297 1321 round-trips: 3
1298 1322 queries: 27
1299 1323 heads summary:
1300 1324 total common heads: 1
1301 1325 also local heads: 0
1302 1326 also remote heads: 0
1303 1327 both: 0
1304 1328 local heads: 1
1305 1329 common: 0
1306 1330 missing: 1
1307 1331 remote heads: 1
1308 1332 common: 0
1309 1333 unknown: 1
1310 1334 local changesets: 1030
1311 1335 common: 1000
1312 1336 heads: 1
1313 1337 roots: 1
1314 1338 missing: 30
1315 1339 heads: 1
1316 1340 roots: 1
1317 1341 first undecided set: 1030
1318 1342 heads: 1
1319 1343 roots: 1
1320 1344 common: 1000
1321 1345 missing: 30
1322 1346 common heads: 7ead0cba2838
1323 1347
1324 1348
1325 1349 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1326 1350
1327 1351 $ hg init manyheads
1328 1352 $ cd manyheads
1329 1353 $ echo "+300:r @a" >dagdesc
1330 1354 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1331 1355 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1332 1356 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1333 1357 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1334 1358 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1335 1359 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1336 1360 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1337 1361 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1338 1362 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1339 1363 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1340 1364 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1341 1365 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1342 1366 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1343 1367 $ echo "@b *r+3" >>dagdesc # one more head
1344 1368 $ hg debugbuilddag <dagdesc
1345 1369 reading DAG from stdin
1346 1370
1347 1371 $ hg heads -t --template . | wc -c
1348 1372 \s*261 (re)
1349 1373
1350 1374 $ hg clone -b a . a
1351 1375 adding changesets
1352 1376 adding manifests
1353 1377 adding file changes
1354 1378 added 1340 changesets with 0 changes to 0 files (+259 heads)
1355 1379 new changesets 1ea73414a91b:1c51e2c80832
1356 1380 updating to branch a
1357 1381 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1358 1382 $ hg clone -b b . b
1359 1383 adding changesets
1360 1384 adding manifests
1361 1385 adding file changes
1362 1386 added 304 changesets with 0 changes to 0 files
1363 1387 new changesets 1ea73414a91b:513314ca8b3a
1364 1388 updating to branch b
1365 1389 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1366 1390
1367 1391 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1368 1392 comparing with b
1369 1393 query 1; heads
1370 1394 searching for changes
1371 1395 taking quick initial sample
1372 1396 searching: 2 queries
1373 1397 query 2; still undecided: 1080, sample size is: 50
1374 1398 sampling from both directions
1375 1399 searching: 3 queries
1376 1400 query 3; still undecided: 1030, sample size is: 200
1377 1401 sampling from both directions
1378 1402 searching: 4 queries
1379 1403 query 4; still undecided: 547, sample size is: 210
1380 1404 sampling from both directions
1381 1405 searching: 5 queries
1382 1406 query 5; still undecided: 336, sample size is: 220
1383 1407 sampling from both directions
1384 1408 searching: 6 queries
1385 1409 query 6; still undecided: 114, sample size is: 114
1386 1410 6 total queries in *.????s (glob)
1387 1411 elapsed time: * seconds (glob)
1388 1412 round-trips: 6
1389 1413 queries: 1054
1390 1414 heads summary:
1391 1415 total common heads: 1
1392 1416 also local heads: 0
1393 1417 also remote heads: 0
1394 1418 both: 0
1395 1419 local heads: 260
1396 1420 common: 0
1397 1421 missing: 260
1398 1422 remote heads: 1
1399 1423 common: 0
1400 1424 unknown: 1
1401 1425 local changesets: 1340
1402 1426 common: 300
1403 1427 heads: 1
1404 1428 roots: 1
1405 1429 missing: 1040
1406 1430 heads: 260
1407 1431 roots: 260
1408 1432 first undecided set: 1340
1409 1433 heads: 260
1410 1434 roots: 1
1411 1435 common: 300
1412 1436 missing: 1040
1413 1437 common heads: 3ee37d65064a
1414 1438 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1415 1439 comparing with b
1416 1440 query 1; heads
1417 1441 searching for changes
1418 1442 taking quick initial sample
1419 1443 searching: 2 queries
1420 1444 query 2; still undecided: 303, sample size is: 9
1421 1445 sampling from both directions
1422 1446 searching: 3 queries
1423 1447 query 3; still undecided: 3, sample size is: 3
1424 1448 3 total queries in *.????s (glob)
1425 1449 elapsed time: * seconds (glob)
1426 1450 round-trips: 3
1427 1451 queries: 13
1428 1452 heads summary:
1429 1453 total common heads: 1
1430 1454 also local heads: 0
1431 1455 also remote heads: 0
1432 1456 both: 0
1433 1457 local heads: 260
1434 1458 common: 0
1435 1459 missing: 260
1436 1460 remote heads: 1
1437 1461 common: 0
1438 1462 unknown: 1
1439 1463 local changesets: 1340
1440 1464 common: 300
1441 1465 heads: 1
1442 1466 roots: 1
1443 1467 missing: 1040
1444 1468 heads: 260
1445 1469 roots: 260
1446 1470 first undecided set: 1340
1447 1471 heads: 260
1448 1472 roots: 1
1449 1473 common: 300
1450 1474 missing: 1040
1451 1475 common heads: 3ee37d65064a
1452 1476
1453 1477 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1454 1478 comparing with b
1455 1479 searching for changes
1456 1480 sampling from both directions
1457 1481 query 1; still undecided: 1340, sample size is: 50
1458 1482 sampling from both directions
1459 1483 query 2; still undecided: 995, sample size is: 60
1460 1484 sampling from both directions
1461 1485 query 3; still undecided: 913, sample size is: 72
1462 1486 sampling from both directions
1463 1487 query 4; still undecided: 816, sample size is: 204
1464 1488 sampling from both directions
1465 1489 query 5; still undecided: 612, sample size is: 153
1466 1490 sampling from both directions
1467 1491 query 6; still undecided: 456, sample size is: 123
1468 1492 sampling from both directions
1469 1493 query 7; still undecided: 332, sample size is: 147
1470 1494 sampling from both directions
1471 1495 query 8; still undecided: 184, sample size is: 176
1472 1496 sampling from both directions
1473 1497 query 9; still undecided: 8, sample size is: 8
1474 1498 9 total queries in *s (glob)
1475 1499 elapsed time: * seconds (glob)
1476 1500 round-trips: 9
1477 1501 queries: 993
1478 1502 heads summary:
1479 1503 total common heads: 1
1480 1504 also local heads: 0
1481 1505 also remote heads: 0
1482 1506 both: 0
1483 1507 local heads: 260
1484 1508 common: 0
1485 1509 missing: 260
1486 1510 remote heads: 1
1487 1511 common: 0
1488 1512 unknown: 1
1489 1513 local changesets: 1340
1490 1514 common: 300
1491 1515 heads: 1
1492 1516 roots: 1
1493 1517 missing: 1040
1494 1518 heads: 260
1495 1519 roots: 260
1496 1520 first undecided set: 1340
1497 1521 heads: 260
1498 1522 roots: 1
1499 1523 common: 300
1500 1524 missing: 1040
1501 1525 common heads: 3ee37d65064a
1502 1526
1503 1527 Test actual protocol when pulling one new head in addition to common heads
1504 1528
1505 1529 $ hg clone -U b c
1506 1530 $ hg -R c id -ir tip
1507 1531 513314ca8b3a
1508 1532 $ hg -R c up -qr default
1509 1533 $ touch c/f
1510 1534 $ hg -R c ci -Aqm "extra head"
1511 1535 $ hg -R c id -i
1512 1536 e64a39e7da8b
1513 1537
1514 1538 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1515 1539 $ cat hg.pid >> $DAEMON_PIDS
1516 1540
1517 1541 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1518 1542 comparing with http://localhost:$HGPORT/
1519 1543 searching for changes
1520 1544 e64a39e7da8b
1521 1545
1522 1546 $ killdaemons.py
1523 1547 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1524 1548 "GET /?cmd=capabilities HTTP/1.1" 200 -
1525 1549 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1526 1550 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1527 1551 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1528 1552 $ cat errors.log
1529 1553
1530 1554 $ cd ..
1531 1555
1532 1556
1533 1557 Issue 4438 - test coverage for 3ef893520a85 issues.
1534 1558
1535 1559 $ mkdir issue4438
1536 1560 $ cd issue4438
1537 1561 #if false
1538 1562 generate new bundles:
1539 1563 $ hg init r1
1540 1564 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1541 1565 $ hg clone -q r1 r2
1542 1566 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1543 1567 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1544 1568 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1545 1569 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1546 1570 #else
1547 1571 use existing bundles:
1548 1572 $ hg init r1
1549 1573 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1550 1574 $ hg -R r1 -q up
1551 1575 $ hg init r2
1552 1576 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1553 1577 $ hg -R r2 -q up
1554 1578 #endif
1555 1579
1556 1580 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1557 1581
1558 1582 $ hg -R r1 outgoing r2 -T'{rev} '
1559 1583 comparing with r2
1560 1584 searching for changes
1561 1585 101 102 103 104 105 106 107 108 109 110 (no-eol)
1562 1586
1563 1587 The case where all the 'initialsamplesize' samples already were common would
1564 1588 give 'all remote heads known locally' without checking the remaining heads -
1565 1589 fixed in 86c35b7ae300:
1566 1590
1567 1591 $ cat >> r1/.hg/hgrc << EOF
1568 1592 > [devel]
1569 1593 > discovery.randomize = False
1570 1594 > EOF
1571 1595
1572 1596 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1573 1597 > --config blackbox.track='command commandfinish discovery'
1574 1598 comparing with r2
1575 1599 searching for changes
1576 1600 101 102 103 104 105 106 107 108 109 110 (no-eol)
1577 1601 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1578 1602 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --no-profile --cmdserver chgunix * (glob) (chg !)
1579 1603 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1580 1604 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1581 1605 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1582 1606 $ cd ..
1583 1607
1584 1608 Even if the set of revs to discover is restricted, unrelated revs may be
1585 1609 returned as common heads.
1586 1610
1587 1611 $ mkdir ancestorsof
1588 1612 $ cd ancestorsof
1589 1613 $ hg init a
1590 1614 $ hg clone a b -q
1591 1615 $ cd b
1592 1616 $ hg debugbuilddag '.:root *root *root'
1593 1617 $ hg log -G -T '{node|short}'
1594 1618 o fa942426a6fd
1595 1619 |
1596 1620 | o 66f7d451a68b
1597 1621 |/
1598 1622 o 1ea73414a91b
1599 1623
1600 1624 $ hg push -r 66f7d451a68b -q
1601 1625 $ hg debugdiscovery --verbose --rev fa942426a6fd
1602 1626 comparing with $TESTTMP/ancestorsof/a
1603 1627 searching for changes
1604 1628 elapsed time: * seconds (glob)
1605 1629 round-trips: 1
1606 1630 queries: 1
1607 1631 heads summary:
1608 1632 total common heads: 1
1609 1633 also local heads: 1
1610 1634 also remote heads: 1
1611 1635 both: 1
1612 1636 local heads: 2
1613 1637 common: 1
1614 1638 missing: 1
1615 1639 remote heads: 1
1616 1640 common: 1
1617 1641 unknown: 0
1618 1642 local changesets: 3
1619 1643 common: 2
1620 1644 heads: 1
1621 1645 roots: 1
1622 1646 missing: 1
1623 1647 heads: 1
1624 1648 roots: 1
1625 1649 first undecided set: 1
1626 1650 heads: 1
1627 1651 roots: 1
1628 1652 common: 0
1629 1653 missing: 1
1630 1654 common heads: 66f7d451a68b
1631 1655
1632 1656 $ cd ..
1633 1657
1634 1658
1635 1659 Test debugging discovery using different subsets of the same repository
1636 1660 ======================================================================
1637 1661
1638 1662 remote is a local subset
1639 1663 ------------------------
1640 1664
1641 1665 remote will be last 25 heads of the local graph
1642 1666
1643 1667 $ cd $TESTTMP/manyheads
1644 1668 $ hg -R a debugdiscovery \
1645 1669 > --debug \
1646 1670 > --remote-as-revs 'last(heads(all()), 25)' \
1647 1671 > --config devel.discovery.randomize=false
1648 1672 query 1; heads
1649 1673 searching for changes
1650 1674 all remote heads known locally
1651 1675 elapsed time: * seconds (glob)
1652 1676 round-trips: 1
1653 1677 queries: 260
1654 1678 heads summary:
1655 1679 total common heads: 25
1656 1680 also local heads: 25
1657 1681 also remote heads: 25
1658 1682 both: 25
1659 1683 local heads: 260
1660 1684 common: 25
1661 1685 missing: 235
1662 1686 remote heads: 25
1663 1687 common: 25
1664 1688 unknown: 0
1665 1689 local changesets: 1340
1666 1690 common: 400
1667 1691 heads: 25
1668 1692 roots: 1
1669 1693 missing: 940
1670 1694 heads: 235
1671 1695 roots: 235
1672 1696 first undecided set: 940
1673 1697 heads: 235
1674 1698 roots: 235
1675 1699 common: 0
1676 1700 missing: 940
1677 1701 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1678 1702
1679 1703 local is a local subset
1680 1704 ------------------------
1681 1705
1682 1706 local will be the first 25 heads of the full local graph
1683 1707
1684 1708 $ cd $TESTTMP/manyheads
1685 1709 $ hg -R a debugdiscovery b \
1686 1710 > --debug \
1687 1711 > --local-as-revs 'first(heads(all()), 25)' \
1688 1712 > --config devel.discovery.randomize=false
1689 1713 comparing with b
1690 1714 query 1; heads
1691 1715 searching for changes
1692 1716 taking quick initial sample
1693 1717 query 2; still undecided: 375, sample size is: 81
1694 1718 sampling from both directions
1695 1719 query 3; still undecided: 3, sample size is: 3
1696 1720 3 total queries *s (glob)
1697 1721 elapsed time: * seconds (glob)
1698 1722 round-trips: 3
1699 1723 queries: 109
1700 1724 heads summary:
1701 1725 total common heads: 1
1702 1726 also local heads: 0
1703 1727 also remote heads: 0
1704 1728 both: 0
1705 1729 local heads: 25
1706 1730 common: 0
1707 1731 missing: 25
1708 1732 remote heads: 1
1709 1733 common: 0
1710 1734 unknown: 1
1711 1735 local changesets: 400
1712 1736 common: 300
1713 1737 heads: 1
1714 1738 roots: 1
1715 1739 missing: 100
1716 1740 heads: 25
1717 1741 roots: 25
1718 1742 first undecided set: 400
1719 1743 heads: 25
1720 1744 roots: 1
1721 1745 common: 300
1722 1746 missing: 100
1723 1747 common heads: 3ee37d65064a
1724 1748
1725 1749 both local and remote are subsets
1726 1750 ------------------------
1727 1751
1728 1752 local will be the first 25 heads and remote the last 25 heads of the local graph
1729 1753
1730 1754 $ cd $TESTTMP/manyheads
1731 1755 $ hg -R a debugdiscovery \
1732 1756 > --debug \
1733 1757 > --local-as-revs 'first(heads(all()), 25)' \
1734 1758 > --remote-as-revs 'last(heads(all()), 25)' \
1735 1759 > --config devel.discovery.randomize=false
1736 1760 query 1; heads
1737 1761 searching for changes
1738 1762 taking quick initial sample
1739 1763 query 2; still undecided: 375, sample size is: 81
1740 1764 sampling from both directions
1741 1765 query 3; still undecided: 3, sample size is: 3
1742 1766 3 total queries in *s (glob)
1743 1767 elapsed time: * seconds (glob)
1744 1768 round-trips: 3
1745 1769 queries: 109
1746 1770 heads summary:
1747 1771 total common heads: 1
1748 1772 also local heads: 0
1749 1773 also remote heads: 0
1750 1774 both: 0
1751 1775 local heads: 25
1752 1776 common: 0
1753 1777 missing: 25
1754 1778 remote heads: 25
1755 1779 common: 0
1756 1780 unknown: 25
1757 1781 local changesets: 400
1758 1782 common: 300
1759 1783 heads: 1
1760 1784 roots: 1
1761 1785 missing: 100
1762 1786 heads: 25
1763 1787 roots: 25
1764 1788 first undecided set: 400
1765 1789 heads: 25
1766 1790 roots: 1
1767 1791 common: 300
1768 1792 missing: 100
1769 1793 common heads: 3ee37d65064a
1770 1794
1771 1795 Test -T json output
1772 1796 -------------------
1773 1797
1774 1798 $ hg -R a debugdiscovery \
1775 1799 > -T json \
1776 1800 > --debug \
1777 1801 > --local-as-revs 'first(heads(all()), 25)' \
1778 1802 > --remote-as-revs 'last(heads(all()), 25)' \
1779 1803 > --config devel.discovery.randomize=false
1780 1804 [
1781 1805 {
1782 1806 "elapsed": *, (glob)
1783 1807 "nb-common-heads": 1,
1784 1808 "nb-common-heads-both": 0,
1785 1809 "nb-common-heads-local": 0,
1786 1810 "nb-common-heads-remote": 0,
1787 1811 "nb-common-roots": 1,
1788 1812 "nb-head-local": 25,
1789 1813 "nb-head-local-missing": 25,
1790 1814 "nb-head-remote": 25,
1791 1815 "nb-head-remote-unknown": 25,
1792 1816 "nb-ini_und": 400,
1793 1817 "nb-ini_und-common": 300,
1794 1818 "nb-ini_und-heads": 25,
1795 1819 "nb-ini_und-missing": 100,
1796 1820 "nb-ini_und-roots": 1,
1797 1821 "nb-missing-heads": 25,
1798 1822 "nb-missing-roots": 25,
1799 1823 "nb-revs": 400,
1800 1824 "nb-revs-common": 300,
1801 1825 "nb-revs-missing": 100,
1802 1826 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1803 1827 "total-queries": 109,
1804 1828 "total-roundtrips": 3
1805 1829 }
1806 1830 ]
General Comments 0
You need to be logged in to leave comments. Login now