discovery: also audit the number of queries done...
marmoute - r49881:f054a557 default
@@ -1,4883 +1,4884 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 requirements,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 )
94 94 from .interfaces import repository
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 deltas as deltautil,
106 106 nodemap,
107 107 rewrite,
108 108 sidedata,
109 109 )
110 110
111 111 release = lockmod.release
112 112
113 113 table = {}
114 114 table.update(strip.command._table)
115 115 command = registrar.command(table)
116 116
117 117
118 118 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
119 119 def debugancestor(ui, repo, *args):
120 120 """find the ancestor revision of two revisions in a given index"""
121 121 if len(args) == 3:
122 122 index, rev1, rev2 = args
123 123 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
124 124 lookup = r.lookup
125 125 elif len(args) == 2:
126 126 if not repo:
127 127 raise error.Abort(
128 128 _(b'there is no Mercurial repository here (.hg not found)')
129 129 )
130 130 rev1, rev2 = args
131 131 r = repo.changelog
132 132 lookup = repo.lookup
133 133 else:
134 134 raise error.Abort(_(b'either two or three arguments required'))
135 135 a = r.ancestor(lookup(rev1), lookup(rev2))
136 136 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
137 137
138 138
139 139 @command(b'debugantivirusrunning', [])
140 140 def debugantivirusrunning(ui, repo):
141 141 """attempt to trigger an antivirus scanner to see if one is active"""
142 142 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
143 143 f.write(
144 144 util.b85decode(
145 145 # This is a base85-armored version of the EICAR test file. See
146 146 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
147 147 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
148 148 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
149 149 )
150 150 )
151 151 # Give an AV engine time to scan the file.
152 152 time.sleep(2)
153 153 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
154 154
155 155
156 156 @command(b'debugapplystreamclonebundle', [], b'FILE')
157 157 def debugapplystreamclonebundle(ui, repo, fname):
158 158 """apply a stream clone bundle file"""
159 159 f = hg.openpath(ui, fname)
160 160 gen = exchange.readbundle(ui, f, fname)
161 161 gen.apply(repo)
162 162
163 163
164 164 @command(
165 165 b'debugbuilddag',
166 166 [
167 167 (
168 168 b'm',
169 169 b'mergeable-file',
170 170 None,
171 171 _(b'add single file mergeable changes'),
172 172 ),
173 173 (
174 174 b'o',
175 175 b'overwritten-file',
176 176 None,
177 177 _(b'add single file all revs overwrite'),
178 178 ),
179 179 (b'n', b'new-file', None, _(b'add new file at each rev')),
180 180 (
181 181 b'',
182 182 b'from-existing',
183 183 None,
184 184 _(b'continue from a non-empty repository'),
185 185 ),
186 186 ],
187 187 _(b'[OPTION]... [TEXT]'),
188 188 )
189 189 def debugbuilddag(
190 190 ui,
191 191 repo,
192 192 text=None,
193 193 mergeable_file=False,
194 194 overwritten_file=False,
195 195 new_file=False,
196 196 from_existing=False,
197 197 ):
198 198 """builds a repo with a given DAG from scratch in the current empty repo
199 199
200 200 The description of the DAG is read from stdin if not given on the
201 201 command line.
202 202
203 203 Elements:
204 204
205 205 - "+n" is a linear run of n nodes based on the current default parent
206 206 - "." is a single node based on the current default parent
207 207 - "$" resets the default parent to null (implied at the start);
208 208 otherwise the default parent is always the last node created
209 209 - "<p" sets the default parent to the backref p
210 210 - "*p" is a fork at parent p, which is a backref
211 211 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
212 212 - "/p2" is a merge of the preceding node and p2
213 213 - ":tag" defines a local tag for the preceding node
214 214 - "@branch" sets the named branch for subsequent nodes
215 215 - "#...\\n" is a comment up to the end of the line
216 216
217 217 Whitespace between the above elements is ignored.
218 218
219 219 A backref is either
220 220
221 221 - a number n, which references the node curr-n, where curr is the current
222 222 node, or
223 223 - the name of a local tag you placed earlier using ":tag", or
224 224 - empty to denote the default parent.
225 225
226 226 All string-valued elements are either strictly alphanumeric, or must
227 227 be enclosed in double quotes ("..."), with "\\" as escape character.
228 228 """
229 229
230 230 if text is None:
231 231 ui.status(_(b"reading DAG from stdin\n"))
232 232 text = ui.fin.read()
233 233
234 234 cl = repo.changelog
235 235 if len(cl) > 0 and not from_existing:
236 236 raise error.Abort(_(b'repository is not empty'))
237 237
238 238 # determine number of revs in DAG
239 239 total = 0
240 240 for type, data in dagparser.parsedag(text):
241 241 if type == b'n':
242 242 total += 1
243 243
244 244 if mergeable_file:
245 245 linesperrev = 2
246 246 # make a file with k lines per rev
247 247 initialmergedlines = [
248 248 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
249 249 ]
250 250 initialmergedlines.append(b"")
251 251
252 252 tags = []
253 253 progress = ui.makeprogress(
254 254 _(b'building'), unit=_(b'revisions'), total=total
255 255 )
256 256 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
257 257 at = -1
258 258 atbranch = b'default'
259 259 nodeids = []
260 260 id = 0
261 261 progress.update(id)
262 262 for type, data in dagparser.parsedag(text):
263 263 if type == b'n':
264 264 ui.note((b'node %s\n' % pycompat.bytestr(data)))
265 265 id, ps = data
266 266
267 267 files = []
268 268 filecontent = {}
269 269
270 270 p2 = None
271 271 if mergeable_file:
272 272 fn = b"mf"
273 273 p1 = repo[ps[0]]
274 274 if len(ps) > 1:
275 275 p2 = repo[ps[1]]
276 276 pa = p1.ancestor(p2)
277 277 base, local, other = [
278 278 x[fn].data() for x in (pa, p1, p2)
279 279 ]
280 280 m3 = simplemerge.Merge3Text(base, local, other)
281 281 ml = [
282 282 l.strip()
283 283 for l in simplemerge.render_minimized(m3)[0]
284 284 ]
285 285 ml.append(b"")
286 286 elif at > 0:
287 287 ml = p1[fn].data().split(b"\n")
288 288 else:
289 289 ml = initialmergedlines
290 290 ml[id * linesperrev] += b" r%i" % id
291 291 mergedtext = b"\n".join(ml)
292 292 files.append(fn)
293 293 filecontent[fn] = mergedtext
294 294
295 295 if overwritten_file:
296 296 fn = b"of"
297 297 files.append(fn)
298 298 filecontent[fn] = b"r%i\n" % id
299 299
300 300 if new_file:
301 301 fn = b"nf%i" % id
302 302 files.append(fn)
303 303 filecontent[fn] = b"r%i\n" % id
304 304 if len(ps) > 1:
305 305 if not p2:
306 306 p2 = repo[ps[1]]
307 307 for fn in p2:
308 308 if fn.startswith(b"nf"):
309 309 files.append(fn)
310 310 filecontent[fn] = p2[fn].data()
311 311
312 312 def fctxfn(repo, cx, path):
313 313 if path in filecontent:
314 314 return context.memfilectx(
315 315 repo, cx, path, filecontent[path]
316 316 )
317 317 return None
318 318
319 319 if len(ps) == 0 or ps[0] < 0:
320 320 pars = [None, None]
321 321 elif len(ps) == 1:
322 322 pars = [nodeids[ps[0]], None]
323 323 else:
324 324 pars = [nodeids[p] for p in ps]
325 325 cx = context.memctx(
326 326 repo,
327 327 pars,
328 328 b"r%i" % id,
329 329 files,
330 330 fctxfn,
331 331 date=(id, 0),
332 332 user=b"debugbuilddag",
333 333 extra={b'branch': atbranch},
334 334 )
335 335 nodeid = repo.commitctx(cx)
336 336 nodeids.append(nodeid)
337 337 at = id
338 338 elif type == b'l':
339 339 id, name = data
340 340 ui.note((b'tag %s\n' % name))
341 341 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
342 342 elif type == b'a':
343 343 ui.note((b'branch %s\n' % data))
344 344 atbranch = data
345 345 progress.update(id)
346 346
347 347 if tags:
348 348 repo.vfs.write(b"localtags", b"".join(tags))
349 349
350 350
351 351 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
352 352 indent_string = b' ' * indent
353 353 if all:
354 354 ui.writenoi18n(
355 355 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
356 356 % indent_string
357 357 )
358 358
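# With --all, print one line per delta in each chunk group: node, both
# parents, the linked changeset, the delta base, and the delta length.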
359 359 def showchunks(named):
360 360 ui.write(b"\n%s%s\n" % (indent_string, named))
361 361 for deltadata in gen.deltaiter():
362 362 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
363 363 ui.write(
364 364 b"%s%s %s %s %s %s %d\n"
365 365 % (
366 366 indent_string,
367 367 hex(node),
368 368 hex(p1),
369 369 hex(p2),
370 370 hex(cs),
371 371 hex(deltabase),
372 372 len(delta),
373 373 )
374 374 )
375 375
376 376 gen.changelogheader()
377 377 showchunks(b"changelog")
378 378 gen.manifestheader()
379 379 showchunks(b"manifest")
380 380 for chunkdata in iter(gen.filelogheader, {}):
381 381 fname = chunkdata[b'filename']
382 382 showchunks(fname)
383 383 else:
384 384 if isinstance(gen, bundle2.unbundle20):
385 385 raise error.Abort(_(b'use debugbundle2 for this file'))
386 386 gen.changelogheader()
387 387 for deltadata in gen.deltaiter():
388 388 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
389 389 ui.write(b"%s%s\n" % (indent_string, hex(node)))
390 390
391 391
392 392 def _debugobsmarkers(ui, part, indent=0, **opts):
393 393 """display version and markers contained in 'data'"""
394 394 opts = pycompat.byteskwargs(opts)
395 395 data = part.read()
396 396 indent_string = b' ' * indent
397 397 try:
398 398 version, markers = obsolete._readmarkers(data)
399 399 except error.UnknownVersion as exc:
400 400 msg = b"%sunsupported version: %s (%d bytes)\n"
401 401 msg %= indent_string, exc.version, len(data)
402 402 ui.write(msg)
403 403 else:
404 404 msg = b"%sversion: %d (%d bytes)\n"
405 405 msg %= indent_string, version, len(data)
406 406 ui.write(msg)
407 407 fm = ui.formatter(b'debugobsolete', opts)
408 408 for rawmarker in sorted(markers):
409 409 m = obsutil.marker(None, rawmarker)
410 410 fm.startitem()
411 411 fm.plain(indent_string)
412 412 cmdutil.showmarker(fm, m)
413 413 fm.end()
414 414
415 415
416 416 def _debugphaseheads(ui, data, indent=0):
417 417 """display version and markers contained in 'data'"""
418 418 indent_string = b' ' * indent
419 419 headsbyphase = phases.binarydecode(data)
420 420 for phase in phases.allphases:
421 421 for head in headsbyphase[phase]:
422 422 ui.write(indent_string)
423 423 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
424 424
425 425
426 426 def _quasirepr(thing):
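# repr()-like formatting, but with dict keys emitted in sorted order so
# the output is deterministic.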
427 427 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
428 428 return b'{%s}' % (
429 429 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
430 430 )
431 431 return pycompat.bytestr(repr(thing))
432 432
433 433
434 434 def _debugbundle2(ui, gen, all=None, **opts):
435 435 """lists the contents of a bundle2"""
436 436 if not isinstance(gen, bundle2.unbundle20):
437 437 raise error.Abort(_(b'not a bundle2 file'))
438 438 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
439 439 parttypes = opts.get('part_type', [])
440 440 for part in gen.iterparts():
441 441 if parttypes and part.type not in parttypes:
442 442 continue
443 443 msg = b'%s -- %s (mandatory: %r)\n'
444 444 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
445 445 if part.type == b'changegroup':
446 446 version = part.params.get(b'version', b'01')
447 447 cg = changegroup.getunbundler(version, part, b'UN')
448 448 if not ui.quiet:
449 449 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
450 450 if part.type == b'obsmarkers':
451 451 if not ui.quiet:
452 452 _debugobsmarkers(ui, part, indent=4, **opts)
453 453 if part.type == b'phase-heads':
454 454 if not ui.quiet:
455 455 _debugphaseheads(ui, part, indent=4)
456 456
457 457
458 458 @command(
459 459 b'debugbundle',
460 460 [
461 461 (b'a', b'all', None, _(b'show all details')),
462 462 (b'', b'part-type', [], _(b'show only the named part type')),
463 463 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
464 464 ],
465 465 _(b'FILE'),
466 466 norepo=True,
467 467 )
468 468 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
469 469 """lists the contents of a bundle"""
470 470 with hg.openpath(ui, bundlepath) as f:
471 471 if spec:
472 472 spec = exchange.getbundlespec(ui, f)
473 473 ui.write(b'%s\n' % spec)
474 474 return
475 475
476 476 gen = exchange.readbundle(ui, f, bundlepath)
477 477 if isinstance(gen, bundle2.unbundle20):
478 478 return _debugbundle2(ui, gen, all=all, **opts)
479 479 _debugchangegroup(ui, gen, all=all, **opts)
480 480
481 481
482 482 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
483 483 def debugcapabilities(ui, path, **opts):
484 484 """lists the capabilities of a remote peer"""
485 485 opts = pycompat.byteskwargs(opts)
486 486 peer = hg.peer(ui, opts, path)
487 487 try:
488 488 caps = peer.capabilities()
489 489 ui.writenoi18n(b'Main capabilities:\n')
490 490 for c in sorted(caps):
491 491 ui.write(b' %s\n' % c)
492 492 b2caps = bundle2.bundle2caps(peer)
493 493 if b2caps:
494 494 ui.writenoi18n(b'Bundle2 capabilities:\n')
495 495 for key, values in sorted(b2caps.items()):
496 496 ui.write(b' %s\n' % key)
497 497 for v in values:
498 498 ui.write(b' %s\n' % v)
499 499 finally:
500 500 peer.close()
501 501
502 502
503 503 @command(
504 504 b'debugchangedfiles',
505 505 [
506 506 (
507 507 b'',
508 508 b'compute',
509 509 False,
510 510 b"compute information instead of reading it from storage",
511 511 ),
512 512 ],
513 513 b'REV',
514 514 )
515 515 def debugchangedfiles(ui, repo, rev, **opts):
516 516 """list the stored files changes for a revision"""
517 517 ctx = logcmdutil.revsingle(repo, rev, None)
518 518 files = None
519 519
520 520 if opts['compute']:
521 521 files = metadata.compute_all_files_changes(ctx)
522 522 else:
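# read the changed-files block back from the changelog's sidedata
# instead of recomputing it from the revision contents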
523 523 sd = repo.changelog.sidedata(ctx.rev())
524 524 files_block = sd.get(sidedata.SD_FILES)
525 525 if files_block is not None:
526 526 files = metadata.decode_files_sidedata(sd)
527 527 if files is not None:
528 528 for f in sorted(files.touched):
529 529 if f in files.added:
530 530 action = b"added"
531 531 elif f in files.removed:
532 532 action = b"removed"
533 533 elif f in files.merged:
534 534 action = b"merged"
535 535 elif f in files.salvaged:
536 536 action = b"salvaged"
537 537 else:
538 538 action = b"touched"
539 539
540 540 copy_parent = b""
541 541 copy_source = b""
542 542 if f in files.copied_from_p1:
543 543 copy_parent = b"p1"
544 544 copy_source = files.copied_from_p1[f]
545 545 elif f in files.copied_from_p2:
546 546 copy_parent = b"p2"
547 547 copy_source = files.copied_from_p2[f]
548 548
549 549 data = (action, copy_parent, f, copy_source)
550 550 template = b"%-8s %2s: %s, %s;\n"
551 551 ui.write(template % data)
552 552
553 553
554 554 @command(b'debugcheckstate', [], b'')
555 555 def debugcheckstate(ui, repo):
556 556 """validate the correctness of the current dirstate"""
557 557 parent1, parent2 = repo.dirstate.parents()
558 558 m1 = repo[parent1].manifest()
559 559 m2 = repo[parent2].manifest()
560 560 errors = 0
561 561 for err in repo.dirstate.verify(m1, m2):
562 562 ui.warn(err[0] % err[1:])
563 563 errors += 1
564 564 if errors:
565 565 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
566 566 raise error.Abort(errstr)
567 567
568 568
569 569 @command(
570 570 b'debugcolor',
571 571 [(b'', b'style', None, _(b'show all configured styles'))],
572 572 b'hg debugcolor',
573 573 )
574 574 def debugcolor(ui, repo, **opts):
575 575 """show available color, effects or style"""
576 576 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
577 577 if opts.get('style'):
578 578 return _debugdisplaystyle(ui)
579 579 else:
580 580 return _debugdisplaycolor(ui)
581 581
582 582
583 583 def _debugdisplaycolor(ui):
584 584 ui = ui.copy()
585 585 ui._styles.clear()
586 586 for effect in color._activeeffects(ui).keys():
587 587 ui._styles[effect] = effect
588 588 if ui._terminfoparams:
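# in terminfo mode, also expose the color.* and terminfo.* config
# entries as styles, with their prefixes stripped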
589 589 for k, v in ui.configitems(b'color'):
590 590 if k.startswith(b'color.'):
591 591 ui._styles[k] = k[6:]
592 592 elif k.startswith(b'terminfo.'):
593 593 ui._styles[k] = k[9:]
594 594 ui.write(_(b'available colors:\n'))
595 595 # sort label with a '_' after the other to group '_background' entry.
596 596 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
597 597 for colorname, label in items:
598 598 ui.write(b'%s\n' % colorname, label=label)
599 599
600 600
601 601 def _debugdisplaystyle(ui):
602 602 ui.write(_(b'available style:\n'))
603 603 if not ui._styles:
604 604 return
605 605 width = max(len(s) for s in ui._styles)
606 606 for label, effects in sorted(ui._styles.items()):
607 607 ui.write(b'%s' % label, label=label)
608 608 if effects:
609 609 # pad so the effect lists line up in a column
610 610 ui.write(b': ')
611 611 ui.write(b' ' * (max(0, width - len(label))))
612 612 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
613 613 ui.write(b'\n')
614 614
615 615
616 616 @command(b'debugcreatestreamclonebundle', [], b'FILE')
617 617 def debugcreatestreamclonebundle(ui, repo, fname):
618 618 """create a stream clone bundle file
619 619
620 620 Stream bundles are special bundles that are essentially archives of
621 621 revlog files. They are commonly used for cloning very quickly.
622 622 """
623 623 # TODO we may want to turn this into an abort when this functionality
624 624 # is moved into `hg bundle`.
625 625 if phases.hassecret(repo):
626 626 ui.warn(
627 627 _(
628 628 b'(warning: stream clone bundle will contain secret '
629 629 b'revisions)\n'
630 630 )
631 631 )
632 632
633 633 requirements, gen = streamclone.generatebundlev1(repo)
634 634 changegroup.writechunks(ui, gen, fname)
635 635
636 636 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
637 637
638 638
639 639 @command(
640 640 b'debugdag',
641 641 [
642 642 (b't', b'tags', None, _(b'use tags as labels')),
643 643 (b'b', b'branches', None, _(b'annotate with branch names')),
644 644 (b'', b'dots', None, _(b'use dots for runs')),
645 645 (b's', b'spaces', None, _(b'separate elements by spaces')),
646 646 ],
647 647 _(b'[OPTION]... [FILE [REV]...]'),
648 648 optionalrepo=True,
649 649 )
650 650 def debugdag(ui, repo, file_=None, *revs, **opts):
651 651 """format the changelog or an index DAG as a concise textual description
652 652
653 653 If you pass a revlog index, the revlog's DAG is emitted. If you list
654 654 revision numbers, they get labeled in the output as rN.
655 655
656 656 Otherwise, the changelog DAG of the current repo is emitted.
657 657 """
658 658 spaces = opts.get('spaces')
659 659 dots = opts.get('dots')
660 660 if file_:
661 661 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
662 662 revs = {int(r) for r in revs}
663 663
664 664 def events():
665 665 for r in rlog:
666 666 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
667 667 if r in revs:
668 668 yield b'l', (r, b"r%i" % r)
669 669
670 670 elif repo:
671 671 cl = repo.changelog
672 672 tags = opts.get('tags')
673 673 branches = opts.get('branches')
674 674 if tags:
675 675 labels = {}
676 676 for l, n in repo.tags().items():
677 677 labels.setdefault(cl.rev(n), []).append(l)
678 678
679 679 def events():
680 680 b = b"default"
681 681 for r in cl:
682 682 if branches:
683 683 newb = cl.read(cl.node(r))[5][b'branch']
684 684 if newb != b:
685 685 yield b'a', newb
686 686 b = newb
687 687 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
688 688 if tags:
689 689 ls = labels.get(r)
690 690 if ls:
691 691 for l in ls:
692 692 yield b'l', (r, l)
693 693
694 694 else:
695 695 raise error.Abort(_(b'need repo for changelog dag'))
696 696
697 697 for line in dagparser.dagtextlines(
698 698 events(),
699 699 addspaces=spaces,
700 700 wraplabels=True,
701 701 wrapannotations=True,
702 702 wrapnonlinear=dots,
703 703 usedots=dots,
704 704 maxlinewidth=70,
705 705 ):
706 706 ui.write(line)
707 707 ui.write(b"\n")
708 708
709 709
710 710 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
711 711 def debugdata(ui, repo, file_, rev=None, **opts):
712 712 """dump the contents of a data file revision"""
713 713 opts = pycompat.byteskwargs(opts)
714 714 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
715 715 if rev is not None:
716 716 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
717 717 file_, rev = None, file_
718 718 elif rev is None:
719 719 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
720 720 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
721 721 try:
722 722 ui.write(r.rawdata(r.lookup(rev)))
723 723 except KeyError:
724 724 raise error.Abort(_(b'invalid revision identifier %s') % rev)
725 725
726 726
727 727 @command(
728 728 b'debugdate',
729 729 [(b'e', b'extended', None, _(b'try extended date formats'))],
730 730 _(b'[-e] DATE [RANGE]'),
731 731 norepo=True,
732 732 optionalrepo=True,
733 733 )
734 734 def debugdate(ui, date, range=None, **opts):
735 735 """parse and display a date"""
736 736 if opts["extended"]:
737 737 d = dateutil.parsedate(date, dateutil.extendeddateformats)
738 738 else:
739 739 d = dateutil.parsedate(date)
740 740 ui.writenoi18n(b"internal: %d %d\n" % d)
741 741 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
742 742 if range:
743 743 m = dateutil.matchdate(range)
744 744 ui.writenoi18n(b"match: %s\n" % m(d[0]))
745 745
746 746
747 747 @command(
748 748 b'debugdeltachain',
749 749 cmdutil.debugrevlogopts + cmdutil.formatteropts,
750 750 _(b'-c|-m|FILE'),
751 751 optionalrepo=True,
752 752 )
753 753 def debugdeltachain(ui, repo, file_=None, **opts):
754 754 """dump information about delta chains in a revlog
755 755
756 756 Output can be templatized. Available template keywords are:
757 757
758 758 :``rev``: revision number
759 759 :``chainid``: delta chain identifier (numbered by unique base)
760 760 :``chainlen``: delta chain length to this revision
761 761 :``prevrev``: previous revision in delta chain
762 762 :``deltatype``: role of delta / how it was computed
763 763 :``compsize``: compressed size of revision
764 764 :``uncompsize``: uncompressed size of revision
765 765 :``chainsize``: total size of compressed revisions in chain
766 766 :``chainratio``: total chain size divided by uncompressed revision size
767 767 (new delta chains typically start at ratio 2.00)
768 768 :``lindist``: linear distance from base revision in delta chain to end
769 769 of this revision
770 770 :``extradist``: total size of revisions not part of this delta chain from
771 771 base of delta chain to end of this revision; a measurement
772 772 of how much extra data we need to read/seek across to read
773 773 the delta chain for this revision
774 774 :``extraratio``: extradist divided by chainsize; another representation of
775 775 how much unrelated data is needed to load this delta chain
776 776
777 777 If the repository is configured to use sparse reads, additional keywords
778 778 are available:
779 779
780 780 :``readsize``: total size of data read from the disk for a revision
781 781 (sum of the sizes of all the blocks)
782 782 :``largestblock``: size of the largest block of data read from the disk
783 783 :``readdensity``: density of useful bytes in the data read from the disk
784 784 :``srchunks``: in how many data hunks the whole revision would be read
785 785
786 786 Sparse reads can be enabled with experimental.sparse-read = True
787 787 """
788 788 opts = pycompat.byteskwargs(opts)
789 789 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
790 790 index = r.index
791 791 start = r.start
792 792 length = r.length
793 793 generaldelta = r._generaldelta
794 794 withsparseread = getattr(r, '_withsparseread', False)
795 795
796 796 def revinfo(rev):
797 797 e = index[rev]
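# revlog index entry fields used below: e[1] compressed size,
# e[2] uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs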
798 798 compsize = e[1]
799 799 uncompsize = e[2]
800 800 chainsize = 0
801 801
802 802 if generaldelta:
803 803 if e[3] == e[5]:
804 804 deltatype = b'p1'
805 805 elif e[3] == e[6]:
806 806 deltatype = b'p2'
807 807 elif e[3] == rev - 1:
808 808 deltatype = b'prev'
809 809 elif e[3] == rev:
810 810 deltatype = b'base'
811 811 else:
812 812 deltatype = b'other'
813 813 else:
814 814 if e[3] == rev:
815 815 deltatype = b'base'
816 816 else:
817 817 deltatype = b'prev'
818 818
819 819 chain = r._deltachain(rev)[0]
820 820 for iterrev in chain:
821 821 e = index[iterrev]
822 822 chainsize += e[1]
823 823
824 824 return compsize, uncompsize, deltatype, chain, chainsize
825 825
826 826 fm = ui.formatter(b'debugdeltachain', opts)
827 827
828 828 fm.plain(
829 829 b' rev chain# chainlen prev delta '
830 830 b'size rawsize chainsize ratio lindist extradist '
831 831 b'extraratio'
832 832 )
833 833 if withsparseread:
834 834 fm.plain(b' readsize largestblk rddensity srchunks')
835 835 fm.plain(b'\n')
836 836
837 837 chainbases = {}
838 838 for rev in r:
839 839 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
840 840 chainbase = chain[0]
841 841 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
842 842 basestart = start(chainbase)
843 843 revstart = start(rev)
844 844 lineardist = revstart + comp - basestart
845 845 extradist = lineardist - chainsize
846 846 try:
847 847 prevrev = chain[-2]
848 848 except IndexError:
849 849 prevrev = -1
850 850
851 851 if uncomp != 0:
852 852 chainratio = float(chainsize) / float(uncomp)
853 853 else:
854 854 chainratio = chainsize
855 855
856 856 if chainsize != 0:
857 857 extraratio = float(extradist) / float(chainsize)
858 858 else:
859 859 extraratio = extradist
860 860
861 861 fm.startitem()
862 862 fm.write(
863 863 b'rev chainid chainlen prevrev deltatype compsize '
864 864 b'uncompsize chainsize chainratio lindist extradist '
865 865 b'extraratio',
866 866 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
867 867 rev,
868 868 chainid,
869 869 len(chain),
870 870 prevrev,
871 871 deltatype,
872 872 comp,
873 873 uncomp,
874 874 chainsize,
875 875 chainratio,
876 876 lineardist,
877 877 extradist,
878 878 extraratio,
879 879 rev=rev,
880 880 chainid=chainid,
881 881 chainlen=len(chain),
882 882 prevrev=prevrev,
883 883 deltatype=deltatype,
884 884 compsize=comp,
885 885 uncompsize=uncomp,
886 886 chainsize=chainsize,
887 887 chainratio=chainratio,
888 888 lindist=lineardist,
889 889 extradist=extradist,
890 890 extraratio=extraratio,
891 891 )
892 892 if withsparseread:
893 893 readsize = 0
894 894 largestblock = 0
895 895 srchunks = 0
896 896
897 897 for revschunk in deltautil.slicechunk(r, chain):
898 898 srchunks += 1
899 899 blkend = start(revschunk[-1]) + length(revschunk[-1])
900 900 blksize = blkend - start(revschunk[0])
901 901
902 902 readsize += blksize
903 903 if largestblock < blksize:
904 904 largestblock = blksize
905 905
906 906 if readsize:
907 907 readdensity = float(chainsize) / float(readsize)
908 908 else:
909 909 readdensity = 1
910 910
911 911 fm.write(
912 912 b'readsize largestblock readdensity srchunks',
913 913 b' %10d %10d %9.5f %8d',
914 914 readsize,
915 915 largestblock,
916 916 readdensity,
917 917 srchunks,
918 918 readsize=readsize,
919 919 largestblock=largestblock,
920 920 readdensity=readdensity,
921 921 srchunks=srchunks,
922 922 )
923 923
924 924 fm.plain(b'\n')
925 925
926 926 fm.end()
927 927
928 928
929 929 @command(
930 930 b'debugdirstate|debugstate',
931 931 [
932 932 (
933 933 b'',
934 934 b'nodates',
935 935 None,
936 936 _(b'do not display the saved mtime (DEPRECATED)'),
937 937 ),
938 938 (b'', b'dates', True, _(b'display the saved mtime')),
939 939 (b'', b'datesort', None, _(b'sort by saved mtime')),
940 940 (
941 941 b'',
942 942 b'all',
943 943 False,
944 944 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
945 945 ),
946 946 ],
947 947 _(b'[OPTION]...'),
948 948 )
949 949 def debugstate(ui, repo, **opts):
950 950 """show the contents of the current dirstate"""
951 951
952 952 nodates = not opts['dates']
953 953 if opts.get('nodates') is not None:
954 954 nodates = True
955 955 datesort = opts.get('datesort')
956 956
957 957 if datesort:
958 958
959 959 def keyfunc(entry):
960 960 filename, _state, _mode, _size, mtime = entry
961 961 return (mtime, filename)
962 962
963 963 else:
964 964 keyfunc = None # sort by filename
965 965 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
966 966 entries.sort(key=keyfunc)
967 967 for entry in entries:
968 968 filename, state, mode, size, mtime = entry
969 969 if mtime == -1:
970 970 timestr = b'unset '
971 971 elif nodates:
972 972 timestr = b'set '
973 973 else:
974 974 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
975 975 timestr = encoding.strtolocal(timestr)
976 976 if mode & 0o20000:
977 977 mode = b'lnk'
978 978 else:
979 979 mode = b'%3o' % (mode & 0o777 & ~util.umask)
980 980 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
981 981 for f in repo.dirstate.copies():
982 982 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
983 983
984 984
985 985 @command(
986 986 b'debugdirstateignorepatternshash',
987 987 [],
988 988 _(b''),
989 989 )
990 990 def debugdirstateignorepatternshash(ui, repo, **opts):
991 991 """show the hash of ignore patterns stored in dirstate if v2,
992 992 or nothing for dirstate-v2
993 993 """
994 994 if repo.dirstate._use_dirstate_v2:
995 995 docket = repo.dirstate._map.docket
996 996 hash_len = 20 # 160 bits for SHA-1
997 997 hash_bytes = docket.tree_metadata[-hash_len:]
998 998 ui.write(binascii.hexlify(hash_bytes) + b'\n')
999 999
1000 1000
1001 1001 @command(
1002 1002 b'debugdiscovery',
1003 1003 [
1004 1004 (b'', b'old', None, _(b'use old-style discovery')),
1005 1005 (
1006 1006 b'',
1007 1007 b'nonheads',
1008 1008 None,
1009 1009 _(b'use old-style discovery with non-heads included'),
1010 1010 ),
1011 1011 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1012 1012 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1013 1013 (
1014 1014 b'',
1015 1015 b'local-as-revs',
1016 1016 b"",
1017 1017 b'treat local as having these revisions only',
1018 1018 ),
1019 1019 (
1020 1020 b'',
1021 1021 b'remote-as-revs',
1022 1022 b"",
1023 1023 b'use local as remote, with only these revisions',
1024 1024 ),
1025 1025 ]
1026 1026 + cmdutil.remoteopts
1027 1027 + cmdutil.formatteropts,
1028 1028 _(b'[--rev REV] [OTHER]'),
1029 1029 )
1030 1030 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1031 1031 """runs the changeset discovery protocol in isolation
1032 1032
1033 1033 The local peer can be "replaced" by a subset of the local repository by
1034 1034 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1035 1035 can be "replaced" by a subset of the local repository using the
1036 1036 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1037 1037 discovery situations.
1038 1038
1039 1039 The following developer-oriented config options are relevant for people playing with this command:
1040 1040
1041 1041 * devel.discovery.exchange-heads=True
1042 1042
1043 1043 If False, the discovery will not start with
1044 1044 remote head fetching and local head querying.
1045 1045
1046 1046 * devel.discovery.grow-sample=True
1047 1047
1048 1048 If False, the sample size used in set discovery will not be increased
1049 1049 through the process
1050 1050
1051 1051 * devel.discovery.grow-sample.dynamic=True
1052 1052
1053 1053 When discovery.grow-sample.dynamic is True, the default, the sample size is
1054 1054 adapted to the shape of the undecided set (it is set to the max of:
1055 1055 <target-size>, len(roots(undecided)), len(heads(undecided))).
1056 1056
1057 1057 * devel.discovery.grow-sample.rate=1.05
1058 1058
1059 1059 the rate at which the sample grows
1060 1060
1061 1061 * devel.discovery.randomize=True
1062 1062
1063 1063 If False, random sampling during discovery is deterministic. It is meant for
1064 1064 integration tests.
1065 1065
1066 1066 * devel.discovery.sample-size=200
1067 1067
1068 1068 Control the initial size of the discovery sample
1069 1069
1070 1070 * devel.discovery.sample-size.initial=100
1071 1071
1072 1072 Control the size of the sample used for the initial phase of discovery
1073 1073 """
1074 1074 opts = pycompat.byteskwargs(opts)
1075 1075 unfi = repo.unfiltered()
1076 1076
1077 1077 # setup potential extra filtering
1078 1078 local_revs = opts[b"local_as_revs"]
1079 1079 remote_revs = opts[b"remote_as_revs"]
1080 1080
1081 1081 # make sure tests are repeatable
1082 1082 random.seed(int(opts[b'seed']))
1083 1083
1084 1084 if not remote_revs:
1085 1085
1086 1086 remoteurl, branches = urlutil.get_unique_pull_path(
1087 1087 b'debugdiscovery', repo, ui, remoteurl
1088 1088 )
1089 1089 remote = hg.peer(repo, opts, remoteurl)
1090 1090 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1091 1091 else:
1092 1092 branches = (None, [])
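# compute the revisions the simulated remote must hide: everything
# that is not an ancestor of the requested revs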
1093 1093 remote_filtered_revs = logcmdutil.revrange(
1094 1094 unfi, [b"not (::(%s))" % remote_revs]
1095 1095 )
1096 1096 remote_filtered_revs = frozenset(remote_filtered_revs)
1097 1097
1098 1098 def remote_func(x):
1099 1099 return remote_filtered_revs
1100 1100
1101 1101 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1102 1102
1103 1103 remote = repo.peer()
1104 1104 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1105 1105
1106 1106 if local_revs:
1107 1107 local_filtered_revs = logcmdutil.revrange(
1108 1108 unfi, [b"not (::(%s))" % local_revs]
1109 1109 )
1110 1110 local_filtered_revs = frozenset(local_filtered_revs)
1111 1111
1112 1112 def local_func(x):
1113 1113 return local_filtered_revs
1114 1114
1115 1115 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1116 1116 repo = repo.filtered(b'debug-discovery-local-filter')
1117 1117
1118 1118 data = {}
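# `data` is filled in by the discovery code with audit information (such
# as the number of round-trips and queries performed) and is extended
# with the statistics computed below.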
1119 1119 if opts.get(b'old'):
1120 1120
1121 1121 def doit(pushedrevs, remoteheads, remote=remote):
1122 1122 if not util.safehasattr(remote, b'branches'):
1123 1123 # enable in-client legacy support
1124 1124 remote = localrepo.locallegacypeer(remote.local())
1125 1125 common, _in, hds = treediscovery.findcommonincoming(
1126 1126 repo, remote, force=True, audit=data
1127 1127 )
1128 1128 common = set(common)
1129 1129 if not opts.get(b'nonheads'):
1130 1130 ui.writenoi18n(
1131 1131 b"unpruned common: %s\n"
1132 1132 % b" ".join(sorted(short(n) for n in common))
1133 1133 )
1134 1134
1135 1135 clnode = repo.changelog.node
1136 1136 common = repo.revs(b'heads(::%ln)', common)
1137 1137 common = {clnode(r) for r in common}
1138 1138 return common, hds
1139 1139
1140 1140 else:
1141 1141
1142 1142 def doit(pushedrevs, remoteheads, remote=remote):
1143 1143 nodes = None
1144 1144 if pushedrevs:
1145 1145 revs = logcmdutil.revrange(repo, pushedrevs)
1146 1146 nodes = [repo[r].node() for r in revs]
1147 1147 common, any, hds = setdiscovery.findcommonheads(
1148 1148 ui, repo, remote, ancestorsof=nodes, audit=data
1149 1149 )
1150 1150 return common, hds
1151 1151
1152 1152 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1153 1153 localrevs = opts[b'rev']
1154 1154
1155 1155 fm = ui.formatter(b'debugdiscovery', opts)
1156 1156 if fm.strict_format:
1157 1157
1158 1158 @contextlib.contextmanager
1159 1159 def may_capture_output():
1160 1160 ui.pushbuffer()
1161 1161 yield
1162 1162 data[b'output'] = ui.popbuffer()
1163 1163
1164 1164 else:
1165 1165 may_capture_output = util.nullcontextmanager
1166 1166 with may_capture_output():
1167 1167 with util.timedcm('debug-discovery') as t:
1168 1168 common, hds = doit(localrevs, remoterevs)
1169 1169
1170 1170 # compute all statistics
1171 1171 heads_common = set(common)
1172 1172 heads_remote = set(hds)
1173 1173 heads_local = set(repo.heads())
1174 1174 # note: they cannot be a local or remote head that is in common and not
1175 1175 # itself a head of common.
1176 1176 heads_common_local = heads_common & heads_local
1177 1177 heads_common_remote = heads_common & heads_remote
1178 1178 heads_common_both = heads_common & heads_remote & heads_local
1179 1179
1180 1180 all = repo.revs(b'all()')
1181 1181 common = repo.revs(b'::%ln', common)
1182 1182 roots_common = repo.revs(b'roots(::%ld)', common)
1183 1183 missing = repo.revs(b'not ::%ld', common)
1184 1184 heads_missing = repo.revs(b'heads(%ld)', missing)
1185 1185 roots_missing = repo.revs(b'roots(%ld)', missing)
1186 1186 assert len(common) + len(missing) == len(all)
1187 1187
1188 1188 initial_undecided = repo.revs(
1189 1189 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1190 1190 )
1191 1191 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1192 1192 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1193 1193 common_initial_undecided = initial_undecided & common
1194 1194 missing_initial_undecided = initial_undecided & missing
1195 1195
1196 1196 data[b'elapsed'] = t.elapsed
1197 1197 data[b'nb-common-heads'] = len(heads_common)
1198 1198 data[b'nb-common-heads-local'] = len(heads_common_local)
1199 1199 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1200 1200 data[b'nb-common-heads-both'] = len(heads_common_both)
1201 1201 data[b'nb-common-roots'] = len(roots_common)
1202 1202 data[b'nb-head-local'] = len(heads_local)
1203 1203 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1204 1204 data[b'nb-head-remote'] = len(heads_remote)
1205 1205 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1206 1206 heads_common_remote
1207 1207 )
1208 1208 data[b'nb-revs'] = len(all)
1209 1209 data[b'nb-revs-common'] = len(common)
1210 1210 data[b'nb-revs-missing'] = len(missing)
1211 1211 data[b'nb-missing-heads'] = len(heads_missing)
1212 1212 data[b'nb-missing-roots'] = len(roots_missing)
1213 1213 data[b'nb-ini_und'] = len(initial_undecided)
1214 1214 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1215 1215 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1216 1216 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1217 1217 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1218 1218
1219 1219 fm.startitem()
1220 1220 fm.data(**pycompat.strkwargs(data))
1221 1221 # display discovery summary
1222 1222 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1223 1223 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1224 fm.plain(b"queries: %(total-queries)9d\n" % data)
1224 1225 fm.plain(b"heads summary:\n")
1225 1226 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1226 1227 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1227 1228 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1228 1229 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1229 1230 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1230 1231 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1231 1232 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1232 1233 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1233 1234 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1234 1235 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1235 1236 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1236 1237 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1237 1238 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1238 1239 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1239 1240 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1240 1241 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1241 1242 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1242 1243 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1243 1244 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1244 1245 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1245 1246 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1246 1247 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1247 1248
1248 1249 if ui.verbose:
1249 1250 fm.plain(
1250 1251 b"common heads: %s\n"
1251 1252 % b" ".join(sorted(short(n) for n in heads_common))
1252 1253 )
1253 1254 fm.end()
1254 1255
1255 1256
1256 1257 _chunksize = 4 << 10
1257 1258
1258 1259
1259 1260 @command(
1260 1261 b'debugdownload',
1261 1262 [
1262 1263 (b'o', b'output', b'', _(b'path')),
1263 1264 ],
1264 1265 optionalrepo=True,
1265 1266 )
1266 1267 def debugdownload(ui, repo, url, output=None, **opts):
1267 1268 """download a resource using Mercurial logic and config"""
1268 1269 fh = urlmod.open(ui, url, output)
1269 1270
1270 1271 dest = ui
1271 1272 if output:
1272 1273 dest = open(output, b"wb", _chunksize)
1273 1274 try:
1274 1275 data = fh.read(_chunksize)
1275 1276 while data:
1276 1277 dest.write(data)
1277 1278 data = fh.read(_chunksize)
1278 1279 finally:
1279 1280 if output:
1280 1281 dest.close()
1281 1282
1282 1283
1283 1284 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1284 1285 def debugextensions(ui, repo, **opts):
1285 1286 '''show information about active extensions'''
1286 1287 opts = pycompat.byteskwargs(opts)
1287 1288 exts = extensions.extensions(ui)
1288 1289 hgver = util.version()
1289 1290 fm = ui.formatter(b'debugextensions', opts)
1290 1291 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1291 1292 isinternal = extensions.ismoduleinternal(extmod)
1292 1293 extsource = None
1293 1294
1294 1295 if util.safehasattr(extmod, '__file__'):
1295 1296 extsource = pycompat.fsencode(extmod.__file__)
1296 1297 elif getattr(sys, 'oxidized', False):
1297 1298 extsource = pycompat.sysexecutable
1298 1299 if isinternal:
1299 1300 exttestedwith = [] # never expose magic string to users
1300 1301 else:
1301 1302 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1302 1303 extbuglink = getattr(extmod, 'buglink', None)
1303 1304
1304 1305 fm.startitem()
1305 1306
1306 1307 if ui.quiet or ui.verbose:
1307 1308 fm.write(b'name', b'%s\n', extname)
1308 1309 else:
1309 1310 fm.write(b'name', b'%s', extname)
1310 1311 if isinternal or hgver in exttestedwith:
1311 1312 fm.plain(b'\n')
1312 1313 elif not exttestedwith:
1313 1314 fm.plain(_(b' (untested!)\n'))
1314 1315 else:
1315 1316 lasttestedversion = exttestedwith[-1]
1316 1317 fm.plain(b' (%s!)\n' % lasttestedversion)
1317 1318
1318 1319 fm.condwrite(
1319 1320 ui.verbose and extsource,
1320 1321 b'source',
1321 1322 _(b' location: %s\n'),
1322 1323 extsource or b"",
1323 1324 )
1324 1325
1325 1326 if ui.verbose:
1326 1327 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1327 1328 fm.data(bundled=isinternal)
1328 1329
1329 1330 fm.condwrite(
1330 1331 ui.verbose and exttestedwith,
1331 1332 b'testedwith',
1332 1333 _(b' tested with: %s\n'),
1333 1334 fm.formatlist(exttestedwith, name=b'ver'),
1334 1335 )
1335 1336
1336 1337 fm.condwrite(
1337 1338 ui.verbose and extbuglink,
1338 1339 b'buglink',
1339 1340 _(b' bug reporting: %s\n'),
1340 1341 extbuglink or b"",
1341 1342 )
1342 1343
1343 1344 fm.end()
1344 1345
1345 1346
1346 1347 @command(
1347 1348 b'debugfileset',
1348 1349 [
1349 1350 (
1350 1351 b'r',
1351 1352 b'rev',
1352 1353 b'',
1353 1354 _(b'apply the filespec on this revision'),
1354 1355 _(b'REV'),
1355 1356 ),
1356 1357 (
1357 1358 b'',
1358 1359 b'all-files',
1359 1360 False,
1360 1361 _(b'test files from all revisions and working directory'),
1361 1362 ),
1362 1363 (
1363 1364 b's',
1364 1365 b'show-matcher',
1365 1366 None,
1366 1367 _(b'print internal representation of matcher'),
1367 1368 ),
1368 1369 (
1369 1370 b'p',
1370 1371 b'show-stage',
1371 1372 [],
1372 1373 _(b'print parsed tree at the given stage'),
1373 1374 _(b'NAME'),
1374 1375 ),
1375 1376 ],
1376 1377 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1377 1378 )
1378 1379 def debugfileset(ui, repo, expr, **opts):
1379 1380 '''parse and apply a fileset specification'''
1380 1381 from . import fileset
1381 1382
1382 1383 fileset.symbols # force import of fileset so we have predicates to optimize
1383 1384 opts = pycompat.byteskwargs(opts)
1384 1385 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1385 1386
1386 1387 stages = [
1387 1388 (b'parsed', pycompat.identity),
1388 1389 (b'analyzed', filesetlang.analyze),
1389 1390 (b'optimized', filesetlang.optimize),
1390 1391 ]
1391 1392 stagenames = {n for n, f in stages}
1392 1393
1393 1394 showalways = set()
1394 1395 if ui.verbose and not opts[b'show_stage']:
1395 1396 # show parsed tree by --verbose (deprecated)
1396 1397 showalways.add(b'parsed')
1397 1398 if opts[b'show_stage'] == [b'all']:
1398 1399 showalways.update(stagenames)
1399 1400 else:
1400 1401 for n in opts[b'show_stage']:
1401 1402 if n not in stagenames:
1402 1403 raise error.Abort(_(b'invalid stage name: %s') % n)
1403 1404 showalways.update(opts[b'show_stage'])
1404 1405
1405 1406 tree = filesetlang.parse(expr)
1406 1407 for n, f in stages:
1407 1408 tree = f(tree)
1408 1409 if n in showalways:
1409 1410 if opts[b'show_stage'] or n != b'parsed':
1410 1411 ui.write(b"* %s:\n" % n)
1411 1412 ui.write(filesetlang.prettyformat(tree), b"\n")
1412 1413
1413 1414 files = set()
1414 1415 if opts[b'all_files']:
1415 1416 for r in repo:
1416 1417 c = repo[r]
1417 1418 files.update(c.files())
1418 1419 files.update(c.substate)
1419 1420 if opts[b'all_files'] or ctx.rev() is None:
1420 1421 wctx = repo[None]
1421 1422 files.update(
1422 1423 repo.dirstate.walk(
1423 1424 scmutil.matchall(repo),
1424 1425 subrepos=list(wctx.substate),
1425 1426 unknown=True,
1426 1427 ignored=True,
1427 1428 )
1428 1429 )
1429 1430 files.update(wctx.substate)
1430 1431 else:
1431 1432 files.update(ctx.files())
1432 1433 files.update(ctx.substate)
1433 1434
1434 1435 m = ctx.matchfileset(repo.getcwd(), expr)
1435 1436 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1436 1437 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1437 1438 for f in sorted(files):
1438 1439 if not m(f):
1439 1440 continue
1440 1441 ui.write(b"%s\n" % f)
1441 1442
1442 1443
1443 1444 @command(
1444 1445 b"debug-repair-issue6528",
1445 1446 [
1446 1447 (
1447 1448 b'',
1448 1449 b'to-report',
1449 1450 b'',
1450 1451 _(b'build a report of affected revisions to this file'),
1451 1452 _(b'FILE'),
1452 1453 ),
1453 1454 (
1454 1455 b'',
1455 1456 b'from-report',
1456 1457 b'',
1457 1458 _(b'repair revisions listed in this report file'),
1458 1459 _(b'FILE'),
1459 1460 ),
1460 1461 (
1461 1462 b'',
1462 1463 b'paranoid',
1463 1464 False,
1464 1465 _(b'check that both detection methods do the same thing'),
1465 1466 ),
1466 1467 ]
1467 1468 + cmdutil.dryrunopts,
1468 1469 )
1469 1470 def debug_repair_issue6528(ui, repo, **opts):
1470 1471 """find affected revisions and repair them. See issue6528 for more details.
1471 1472
1472 1473 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1473 1474 computation of affected revisions for a given repository across clones.
1474 1475 The report format is line-based (with empty lines ignored):
1475 1476
1476 1477 ```
1477 1478 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1478 1479 ```
1479 1480
1480 1481 There can be multiple broken revisions per filelog, they are separated by
1481 1482 a comma with no spaces. The only space is between the revision(s) and the
1482 1483 filename.
1483 1484
1484 1485 Note that this does *not* mean that this repairs future affected revisions,
1485 1486 that needs a separate fix at the exchange level that was introduced in
1486 1487 Mercurial 5.9.1.
1487 1488
1488 1489 There is a `--paranoid` flag to test that the fast implementation is correct
1489 1490 by checking it against the slow implementation. Since this matter is quite
1490 1491 urgent and testing every edge-case is probably quite costly, we use this
1491 1492 method to test on large repositories as a fuzzing method of sorts.
1492 1493 """
1493 1494 cmdutil.check_incompatible_arguments(
1494 1495 opts, 'to_report', ['from_report', 'dry_run']
1495 1496 )
1496 1497 dry_run = opts.get('dry_run')
1497 1498 to_report = opts.get('to_report')
1498 1499 from_report = opts.get('from_report')
1499 1500 paranoid = opts.get('paranoid')
1500 1501 # TODO maybe add filelog pattern and revision pattern parameters to help
1501 1502 # narrow down the search for users that know what they're looking for?
1502 1503
1503 1504 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1504 1505 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1505 1506 raise error.Abort(_(msg))
1506 1507
1507 1508 rewrite.repair_issue6528(
1508 1509 ui,
1509 1510 repo,
1510 1511 dry_run=dry_run,
1511 1512 to_report=to_report,
1512 1513 from_report=from_report,
1513 1514 paranoid=paranoid,
1514 1515 )
1515 1516
1516 1517
1517 1518 @command(b'debugformat', [] + cmdutil.formatteropts)
1518 1519 def debugformat(ui, repo, **opts):
1519 1520 """display format information about the current repository
1520 1521
1521 1522 Use --verbose to get extra information about the current config value and
1522 1523 the Mercurial default."""
1523 1524 opts = pycompat.byteskwargs(opts)
1524 1525 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1525 1526 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1526 1527
1527 1528 def makeformatname(name):
1528 1529 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1529 1530
1530 1531 fm = ui.formatter(b'debugformat', opts)
1531 1532 if fm.isplain():
1532 1533
1533 1534 def formatvalue(value):
1534 1535 if util.safehasattr(value, b'startswith'):
1535 1536 return value
1536 1537 if value:
1537 1538 return b'yes'
1538 1539 else:
1539 1540 return b'no'
1540 1541
1541 1542 else:
1542 1543 formatvalue = pycompat.identity
1543 1544
1544 1545 fm.plain(b'format-variant')
1545 1546 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1546 1547 fm.plain(b' repo')
1547 1548 if ui.verbose:
1548 1549 fm.plain(b' config default')
1549 1550 fm.plain(b'\n')
1550 1551 for fv in upgrade.allformatvariant:
1551 1552 fm.startitem()
1552 1553 repovalue = fv.fromrepo(repo)
1553 1554 configvalue = fv.fromconfig(repo)
1554 1555
1555 1556 if repovalue != configvalue:
1556 1557 namelabel = b'formatvariant.name.mismatchconfig'
1557 1558 repolabel = b'formatvariant.repo.mismatchconfig'
1558 1559 elif repovalue != fv.default:
1559 1560 namelabel = b'formatvariant.name.mismatchdefault'
1560 1561 repolabel = b'formatvariant.repo.mismatchdefault'
1561 1562 else:
1562 1563 namelabel = b'formatvariant.name.uptodate'
1563 1564 repolabel = b'formatvariant.repo.uptodate'
1564 1565
1565 1566 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1566 1567 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1567 1568 if fv.default != configvalue:
1568 1569 configlabel = b'formatvariant.config.special'
1569 1570 else:
1570 1571 configlabel = b'formatvariant.config.default'
1571 1572 fm.condwrite(
1572 1573 ui.verbose,
1573 1574 b'config',
1574 1575 b' %6s',
1575 1576 formatvalue(configvalue),
1576 1577 label=configlabel,
1577 1578 )
1578 1579 fm.condwrite(
1579 1580 ui.verbose,
1580 1581 b'default',
1581 1582 b' %7s',
1582 1583 formatvalue(fv.default),
1583 1584 label=b'formatvariant.default',
1584 1585 )
1585 1586 fm.plain(b'\n')
1586 1587 fm.end()
1587 1588
1588 1589
1589 1590 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1590 1591 def debugfsinfo(ui, path=b"."):
1591 1592 """show information detected about current filesystem"""
1592 1593 ui.writenoi18n(b'path: %s\n' % path)
1593 1594 ui.writenoi18n(
1594 1595 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1595 1596 )
1596 1597 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1597 1598 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1598 1599 ui.writenoi18n(
1599 1600 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1600 1601 )
1601 1602 ui.writenoi18n(
1602 1603 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1603 1604 )
1604 1605 casesensitive = b'(unknown)'
1605 1606 try:
1606 1607 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1607 1608 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1608 1609 except OSError:
1609 1610 pass
1610 1611 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1611 1612
1612 1613
1613 1614 @command(
1614 1615 b'debuggetbundle',
1615 1616 [
1616 1617 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1617 1618 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1618 1619 (
1619 1620 b't',
1620 1621 b'type',
1621 1622 b'bzip2',
1622 1623 _(b'bundle compression type to use'),
1623 1624 _(b'TYPE'),
1624 1625 ),
1625 1626 ],
1626 1627 _(b'REPO FILE [-H|-C ID]...'),
1627 1628 norepo=True,
1628 1629 )
1629 1630 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1630 1631 """retrieves a bundle from a repo
1631 1632
1632 1633 Every ID must be a full-length hex node id string. Saves the bundle to the
1633 1634 given file.
1634 1635 """
1635 1636 opts = pycompat.byteskwargs(opts)
1636 1637 repo = hg.peer(ui, opts, repopath)
1637 1638 if not repo.capable(b'getbundle'):
1638 1639 raise error.Abort(b"getbundle() not supported by target repository")
1639 1640 args = {}
1640 1641 if common:
1641 1642 args['common'] = [bin(s) for s in common]
1642 1643 if head:
1643 1644 args['heads'] = [bin(s) for s in head]
1644 1645 # TODO: get desired bundlecaps from command line.
1645 1646 args['bundlecaps'] = None
1646 1647 bundle = repo.getbundle(b'debug', **args)
1647 1648
1648 1649 bundletype = opts.get(b'type', b'bzip2').lower()
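# map the user-facing compression name to its on-the-wire bundle header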
1649 1650 btypes = {
1650 1651 b'none': b'HG10UN',
1651 1652 b'bzip2': b'HG10BZ',
1652 1653 b'gzip': b'HG10GZ',
1653 1654 b'bundle2': b'HG20',
1654 1655 }
1655 1656 bundletype = btypes.get(bundletype)
1656 1657 if bundletype not in bundle2.bundletypes:
1657 1658 raise error.Abort(_(b'unknown bundle type specified with --type'))
1658 1659 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1659 1660
1660 1661
1661 1662 @command(b'debugignore', [], b'[FILE]')
1662 1663 def debugignore(ui, repo, *files, **opts):
1663 1664 """display the combined ignore pattern and information about ignored files
1664 1665
1665 1666 With no argument display the combined ignore pattern.
1666 1667
1667 1668 Given space-separated file names, show whether each file is ignored and,
1668 1669 if so, show the ignore rule (file and line number) that matched it.
1669 1670 """
1670 1671 ignore = repo.dirstate._ignore
1671 1672 if not files:
1672 1673 # Show all the patterns
1673 1674 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1674 1675 else:
1675 1676 m = scmutil.match(repo[None], pats=files)
1676 1677 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1677 1678 for f in m.files():
1678 1679 nf = util.normpath(f)
1679 1680 ignored = None
1680 1681 ignoredata = None
1681 1682 if nf != b'.':
1682 1683 if ignore(nf):
1683 1684 ignored = nf
1684 1685 ignoredata = repo.dirstate._ignorefileandline(nf)
1685 1686 else:
1686 1687 for p in pathutil.finddirs(nf):
1687 1688 if ignore(p):
1688 1689 ignored = p
1689 1690 ignoredata = repo.dirstate._ignorefileandline(p)
1690 1691 break
1691 1692 if ignored:
1692 1693 if ignored == nf:
1693 1694 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1694 1695 else:
1695 1696 ui.write(
1696 1697 _(
1697 1698 b"%s is ignored because of "
1698 1699 b"containing directory %s\n"
1699 1700 )
1700 1701 % (uipathfn(f), ignored)
1701 1702 )
1702 1703 ignorefile, lineno, line = ignoredata
1703 1704 ui.write(
1704 1705 _(b"(ignore rule in %s, line %d: '%s')\n")
1705 1706 % (ignorefile, lineno, line)
1706 1707 )
1707 1708 else:
1708 1709 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1709 1710
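
# A minimal illustrative sketch of the containing-directory walk used
# by debugignore above: for a path like b'a/b/c', pathutil.finddirs()
# yields b'a/b', then b'a', and finally b'', so the innermost ignored
# ancestor wins. The helper and its arguments are hypothetical.
def _example_ignored_parent(ignore, nf):
    for p in pathutil.finddirs(nf):
        if ignore(p):
            return p  # innermost ignored ancestor directory
    return None
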
1710 1711
1711 1712 @command(
1712 1713 b'debugindex',
1713 1714 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1714 1715 _(b'-c|-m|FILE'),
1715 1716 )
1716 1717 def debugindex(ui, repo, file_=None, **opts):
1717 1718 """dump index data for a storage primitive"""
1718 1719 opts = pycompat.byteskwargs(opts)
1719 1720 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1720 1721
1721 1722 if ui.debugflag:
1722 1723 shortfn = hex
1723 1724 else:
1724 1725 shortfn = short
1725 1726
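# There might not be anything in store, so have a sane default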
1726 1727 idlen = 12
1727 1728 for i in store:
1728 1729 idlen = len(shortfn(store.node(i)))
1729 1730 break
1730 1731
1731 1732 fm = ui.formatter(b'debugindex', opts)
1732 1733 fm.plain(
1733 1734 b' rev linkrev %s %s p2\n'
1734 1735 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1735 1736 )
1736 1737
1737 1738 for rev in store:
1738 1739 node = store.node(rev)
1739 1740 parents = store.parents(node)
1740 1741
1741 1742 fm.startitem()
1742 1743 fm.write(b'rev', b'%6d ', rev)
1743 1744 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1744 1745 fm.write(b'node', b'%s ', shortfn(node))
1745 1746 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1746 1747 fm.write(b'p2', b'%s', shortfn(parents[1]))
1747 1748 fm.plain(b'\n')
1748 1749
1749 1750 fm.end()
1750 1751
1751 1752
1752 1753 @command(
1753 1754 b'debugindexdot',
1754 1755 cmdutil.debugrevlogopts,
1755 1756 _(b'-c|-m|FILE'),
1756 1757 optionalrepo=True,
1757 1758 )
1758 1759 def debugindexdot(ui, repo, file_=None, **opts):
1759 1760 """dump an index DAG as a graphviz dot file"""
1760 1761 opts = pycompat.byteskwargs(opts)
1761 1762 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1762 1763 ui.writenoi18n(b"digraph G {\n")
1763 1764 for i in r:
1764 1765 node = r.node(i)
1765 1766 pp = r.parents(node)
1766 1767 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1767 1768 if pp[1] != repo.nullid:
1768 1769 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1769 1770 ui.write(b"}\n")
1770 1771
1771 1772
1772 1773 @command(b'debugindexstats', [])
1773 1774 def debugindexstats(ui, repo):
1774 1775 """show stats related to the changelog index"""
1775 1776 repo.changelog.shortest(repo.nullid, 1)
1776 1777 index = repo.changelog.index
1777 1778 if not util.safehasattr(index, 'stats'):
1778 1779 raise error.Abort(_(b'debugindexstats only works with native code'))
1779 1780 for k, v in sorted(index.stats().items()):
1780 1781 ui.write(b'%s: %d\n' % (k, v))
1781 1782
1782 1783
1783 1784 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1784 1785 def debuginstall(ui, **opts):
1785 1786 """test Mercurial installation
1786 1787
1787 1788 Returns 0 on success.
1788 1789 """
1789 1790 opts = pycompat.byteskwargs(opts)
1790 1791
1791 1792 problems = 0
1792 1793
1793 1794 fm = ui.formatter(b'debuginstall', opts)
1794 1795 fm.startitem()
1795 1796
1796 1797 # encoding might be unknown or wrong. don't translate these messages.
1797 1798 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1798 1799 err = None
1799 1800 try:
1800 1801 codecs.lookup(pycompat.sysstr(encoding.encoding))
1801 1802 except LookupError as inst:
1802 1803 err = stringutil.forcebytestr(inst)
1803 1804 problems += 1
1804 1805 fm.condwrite(
1805 1806 err,
1806 1807 b'encodingerror',
1807 1808 b" %s\n (check that your locale is properly set)\n",
1808 1809 err,
1809 1810 )
1810 1811
1811 1812 # Python
1812 1813 pythonlib = None
1813 1814 if util.safehasattr(os, '__file__'):
1814 1815 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1815 1816 elif getattr(sys, 'oxidized', False):
1816 1817 pythonlib = pycompat.sysexecutable
1817 1818
1818 1819 fm.write(
1819 1820 b'pythonexe',
1820 1821 _(b"checking Python executable (%s)\n"),
1821 1822 pycompat.sysexecutable or _(b"unknown"),
1822 1823 )
1823 1824 fm.write(
1824 1825 b'pythonimplementation',
1825 1826 _(b"checking Python implementation (%s)\n"),
1826 1827 pycompat.sysbytes(platform.python_implementation()),
1827 1828 )
1828 1829 fm.write(
1829 1830 b'pythonver',
1830 1831 _(b"checking Python version (%s)\n"),
1831 1832 (b"%d.%d.%d" % sys.version_info[:3]),
1832 1833 )
1833 1834 fm.write(
1834 1835 b'pythonlib',
1835 1836 _(b"checking Python lib (%s)...\n"),
1836 1837 pythonlib or _(b"unknown"),
1837 1838 )
1838 1839
1839 1840 try:
1840 1841 from . import rustext # pytype: disable=import-error
1841 1842
1842 1843 rustext.__doc__ # trigger lazy import
1843 1844 except ImportError:
1844 1845 rustext = None
1845 1846
1846 1847 security = set(sslutil.supportedprotocols)
1847 1848 if sslutil.hassni:
1848 1849 security.add(b'sni')
1849 1850
1850 1851 fm.write(
1851 1852 b'pythonsecurity',
1852 1853 _(b"checking Python security support (%s)\n"),
1853 1854 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1854 1855 )
1855 1856
1856 1857 # These are warnings, not errors. So don't increment problem count. This
1857 1858 # may change in the future.
1858 1859 if b'tls1.2' not in security:
1859 1860 fm.plain(
1860 1861 _(
1861 1862 b' TLS 1.2 not supported by Python install; '
1862 1863 b'network connections lack modern security\n'
1863 1864 )
1864 1865 )
1865 1866 if b'sni' not in security:
1866 1867 fm.plain(
1867 1868 _(
1868 1869 b' SNI not supported by Python install; may have '
1869 1870 b'connectivity issues with some servers\n'
1870 1871 )
1871 1872 )
1872 1873
1873 1874 fm.plain(
1874 1875 _(
1875 1876 b"checking Rust extensions (%s)\n"
1876 1877 % (b'missing' if rustext is None else b'installed')
1877 1878 ),
1878 1879 )
1879 1880
1880 1881 # TODO print CA cert info
1881 1882
1882 1883 # hg version
1883 1884 hgver = util.version()
1884 1885 fm.write(
1885 1886 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1886 1887 )
1887 1888 fm.write(
1888 1889 b'hgverextra',
1889 1890 _(b"checking Mercurial custom build (%s)\n"),
1890 1891 b'+'.join(hgver.split(b'+')[1:]),
1891 1892 )
1892 1893
1893 1894 # compiled modules
1894 1895 hgmodules = None
1895 1896 if util.safehasattr(sys.modules[__name__], '__file__'):
1896 1897 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1897 1898 elif getattr(sys, 'oxidized', False):
1898 1899 hgmodules = pycompat.sysexecutable
1899 1900
1900 1901 fm.write(
1901 1902 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1902 1903 )
1903 1904 fm.write(
1904 1905 b'hgmodules',
1905 1906 _(b"checking installed modules (%s)...\n"),
1906 1907 hgmodules or _(b"unknown"),
1907 1908 )
1908 1909
1909 1910 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1910 1911 rustext = rustandc # for now, that's the only case
1911 1912 cext = policy.policy in (b'c', b'allow') or rustandc
1912 1913 nopure = cext or rustext
1913 1914 if nopure:
1914 1915 err = None
1915 1916 try:
1916 1917 if cext:
1917 1918 from .cext import ( # pytype: disable=import-error
1918 1919 base85,
1919 1920 bdiff,
1920 1921 mpatch,
1921 1922 osutil,
1922 1923 )
1923 1924
1924 1925 # quiet pyflakes
1925 1926 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1926 1927 if rustext:
1927 1928 from .rustext import ( # pytype: disable=import-error
1928 1929 ancestor,
1929 1930 dirstate,
1930 1931 )
1931 1932
1932 1933 dir(ancestor), dir(dirstate) # quiet pyflakes
1933 1934 except Exception as inst:
1934 1935 err = stringutil.forcebytestr(inst)
1935 1936 problems += 1
1936 1937 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1937 1938
1938 1939 compengines = util.compengines._engines.values()
1939 1940 fm.write(
1940 1941 b'compengines',
1941 1942 _(b'checking registered compression engines (%s)\n'),
1942 1943 fm.formatlist(
1943 1944 sorted(e.name() for e in compengines),
1944 1945 name=b'compengine',
1945 1946 fmt=b'%s',
1946 1947 sep=b', ',
1947 1948 ),
1948 1949 )
1949 1950 fm.write(
1950 1951 b'compenginesavail',
1951 1952 _(b'checking available compression engines (%s)\n'),
1952 1953 fm.formatlist(
1953 1954 sorted(e.name() for e in compengines if e.available()),
1954 1955 name=b'compengine',
1955 1956 fmt=b'%s',
1956 1957 sep=b', ',
1957 1958 ),
1958 1959 )
1959 1960 wirecompengines = compression.compengines.supportedwireengines(
1960 1961 compression.SERVERROLE
1961 1962 )
1962 1963 fm.write(
1963 1964 b'compenginesserver',
1964 1965 _(
1965 1966 b'checking available compression engines '
1966 1967 b'for wire protocol (%s)\n'
1967 1968 ),
1968 1969 fm.formatlist(
1969 1970 [e.name() for e in wirecompengines if e.wireprotosupport()],
1970 1971 name=b'compengine',
1971 1972 fmt=b'%s',
1972 1973 sep=b', ',
1973 1974 ),
1974 1975 )
1975 1976 re2 = b'missing'
1976 1977 if util._re2:
1977 1978 re2 = b'available'
1978 1979 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1979 1980 fm.data(re2=bool(util._re2))
1980 1981
1981 1982 # templates
1982 1983 p = templater.templatedir()
1983 1984 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1984 1985 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1985 1986 if p:
1986 1987 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1987 1988 if m:
1988 1989 # template found, check if it is working
1989 1990 err = None
1990 1991 try:
1991 1992 templater.templater.frommapfile(m)
1992 1993 except Exception as inst:
1993 1994 err = stringutil.forcebytestr(inst)
1994 1995 p = None
1995 1996 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1996 1997 else:
1997 1998 p = None
1998 1999 fm.condwrite(
1999 2000 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2000 2001 )
2001 2002 fm.condwrite(
2002 2003 not m,
2003 2004 b'defaulttemplatenotfound',
2004 2005 _(b" template '%s' not found\n"),
2005 2006 b"default",
2006 2007 )
2007 2008 if not p:
2008 2009 problems += 1
2009 2010 fm.condwrite(
2010 2011 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2011 2012 )
2012 2013
2013 2014 # editor
2014 2015 editor = ui.geteditor()
2015 2016 editor = util.expandpath(editor)
2016 2017 editorbin = procutil.shellsplit(editor)[0]
2017 2018 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2018 2019 cmdpath = procutil.findexe(editorbin)
2019 2020 fm.condwrite(
2020 2021 not cmdpath and editor == b'vi',
2021 2022 b'vinotfound',
2022 2023 _(
2023 2024 b" No commit editor set and can't find %s in PATH\n"
2024 2025 b" (specify a commit editor in your configuration"
2025 2026 b" file)\n"
2026 2027 ),
2027 2028 not cmdpath and editor == b'vi' and editorbin,
2028 2029 )
2029 2030 fm.condwrite(
2030 2031 not cmdpath and editor != b'vi',
2031 2032 b'editornotfound',
2032 2033 _(
2033 2034 b" Can't find editor '%s' in PATH\n"
2034 2035 b" (specify a commit editor in your configuration"
2035 2036 b" file)\n"
2036 2037 ),
2037 2038 not cmdpath and editorbin,
2038 2039 )
2039 2040 if not cmdpath and editor != b'vi':
2040 2041 problems += 1
2041 2042
2042 2043 # check username
2043 2044 username = None
2044 2045 err = None
2045 2046 try:
2046 2047 username = ui.username()
2047 2048 except error.Abort as e:
2048 2049 err = e.message
2049 2050 problems += 1
2050 2051
2051 2052 fm.condwrite(
2052 2053 username, b'username', _(b"checking username (%s)\n"), username
2053 2054 )
2054 2055 fm.condwrite(
2055 2056 err,
2056 2057 b'usernameerror',
2057 2058 _(
2058 2059 b"checking username...\n %s\n"
2059 2060 b" (specify a username in your configuration file)\n"
2060 2061 ),
2061 2062 err,
2062 2063 )
2063 2064
2064 2065 for name, mod in extensions.extensions():
2065 2066 handler = getattr(mod, 'debuginstall', None)
2066 2067 if handler is not None:
2067 2068 problems += handler(ui, fm)
2068 2069
2069 2070 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2070 2071 if not problems:
2071 2072 fm.data(problems=problems)
2072 2073 fm.condwrite(
2073 2074 problems,
2074 2075 b'problems',
2075 2076 _(b"%d problems detected, please check your install!\n"),
2076 2077 problems,
2077 2078 )
2078 2079 fm.end()
2079 2080
2080 2081 return problems
2081 2082
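
# A minimal illustrative sketch of the per-extension hook dispatched
# near the end of debuginstall above: an extension module may define
# debuginstall(ui, fm), write its own checks through the formatter,
# and return the number of problems found. The helper name below is
# hypothetical and deliberately avoids shadowing this module's
# debuginstall command.
def _example_ext_debuginstall(ui, fm):
    fm.write(b'exampleext', _(b"checking example extension (%s)\n"), b'ok')
    return 0  # no problems detected
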
2082 2083
2083 2084 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2084 2085 def debugknown(ui, repopath, *ids, **opts):
2085 2086 """test whether node ids are known to a repo
2086 2087
2087 2088 Every ID must be a full-length hex node id string. Returns a list of 0s
2088 2089 and 1s indicating unknown/known.
2089 2090 """
2090 2091 opts = pycompat.byteskwargs(opts)
2091 2092 repo = hg.peer(ui, opts, repopath)
2092 2093 if not repo.capable(b'known'):
2093 2094 raise error.Abort(b"known() not supported by target repository")
2094 2095 flags = repo.known([bin(s) for s in ids])
2095 2096 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2096 2097
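
# A minimal illustrative sketch of driving the "known" wire protocol
# capability directly, mirroring debugknown above. `peer` and
# `hexnodes` are hypothetical: an established peer object and a list
# of full-length hex node ids.
def _example_known(peer, hexnodes):
    flags = peer.known([bin(s) for s in hexnodes])
    # render as a string of 0s and 1s, matching the command's output
    return b"".join(f and b"1" or b"0" for f in flags)
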
2097 2098
2098 2099 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2099 2100 def debuglabelcomplete(ui, repo, *args):
2100 2101 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2101 2102 debugnamecomplete(ui, repo, *args)
2102 2103
2103 2104
2104 2105 @command(
2105 2106 b'debuglocks',
2106 2107 [
2107 2108 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2108 2109 (
2109 2110 b'W',
2110 2111 b'force-free-wlock',
2111 2112 None,
2112 2113 _(b'free the working state lock (DANGEROUS)'),
2113 2114 ),
2114 2115 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2115 2116 (
2116 2117 b'S',
2117 2118 b'set-wlock',
2118 2119 None,
2119 2120 _(b'set the working state lock until stopped'),
2120 2121 ),
2121 2122 ],
2122 2123 _(b'[OPTION]...'),
2123 2124 )
2124 2125 def debuglocks(ui, repo, **opts):
2125 2126 """show or modify state of locks
2126 2127
2127 2128 By default, this command will show which locks are held. This
2128 2129 includes the user and process holding the lock, the amount of time
2129 2130 the lock has been held, and the machine name where the process is
2130 2131 running if it's not local.
2131 2132
2132 2133 Locks protect the integrity of Mercurial's data, so they should be
2133 2134 treated with care. System crashes or other interruptions may prevent
2134 2135 locks from being properly released, though Mercurial will usually
2135 2136 detect and remove such stale locks automatically.
2136 2137
2137 2138 However, detecting stale locks may not always be possible (for
2138 2139 instance, on a shared filesystem). Removing locks may also be
2139 2140 blocked by filesystem permissions.
2140 2141
2141 2142 Setting a lock will prevent other commands from changing the data.
2142 2143 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2143 2144 The set locks are removed when the command exits.
2144 2145
2145 2146 Returns 0 if no locks are held.
2146 2147
2147 2148 """
2148 2149
2149 2150 if opts.get('force_free_lock'):
2150 2151 repo.svfs.unlink(b'lock')
2151 2152 if opts.get('force_free_wlock'):
2152 2153 repo.vfs.unlink(b'wlock')
2153 2154 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2154 2155 return 0
2155 2156
2156 2157 locks = []
2157 2158 try:
2158 2159 if opts.get('set_wlock'):
2159 2160 try:
2160 2161 locks.append(repo.wlock(False))
2161 2162 except error.LockHeld:
2162 2163 raise error.Abort(_(b'wlock is already held'))
2163 2164 if opts.get('set_lock'):
2164 2165 try:
2165 2166 locks.append(repo.lock(False))
2166 2167 except error.LockHeld:
2167 2168 raise error.Abort(_(b'lock is already held'))
2168 2169 if len(locks):
2169 2170 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2170 2171 return 0
2171 2172 finally:
2172 2173 release(*locks)
2173 2174
2174 2175 now = time.time()
2175 2176 held = 0
2176 2177
2177 2178 def report(vfs, name, method):
2178 2179 # this causes stale locks to get reaped for more accurate reporting
2179 2180 try:
2180 2181 l = method(False)
2181 2182 except error.LockHeld:
2182 2183 l = None
2183 2184
2184 2185 if l:
2185 2186 l.release()
2186 2187 else:
2187 2188 try:
2188 2189 st = vfs.lstat(name)
2189 2190 age = now - st[stat.ST_MTIME]
2190 2191 user = util.username(st.st_uid)
2191 2192 locker = vfs.readlock(name)
2192 2193 if b":" in locker:
2193 2194 host, pid = locker.split(b':')
2194 2195 if host == socket.gethostname():
2195 2196 locker = b'user %s, process %s' % (user or b'None', pid)
2196 2197 else:
2197 2198 locker = b'user %s, process %s, host %s' % (
2198 2199 user or b'None',
2199 2200 pid,
2200 2201 host,
2201 2202 )
2202 2203 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2203 2204 return 1
2204 2205 except OSError as e:
2205 2206 if e.errno != errno.ENOENT:
2206 2207 raise
2207 2208
2208 2209 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2209 2210 return 0
2210 2211
2211 2212 held += report(repo.svfs, b"lock", repo.lock)
2212 2213 held += report(repo.vfs, b"wlock", repo.wlock)
2213 2214
2214 2215 return held
2215 2216
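
# A minimal illustrative sketch of the lock-holder payload handled in
# report() above: a lock file records b"host:pid", and the host part
# decides whether the holder is a local process. `locker` is
# hypothetical input.
def _example_parse_locker(locker):
    if b":" not in locker:
        return None, locker
    host, pid = locker.split(b':', 1)
    return host, pid
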
2216 2217
2217 2218 @command(
2218 2219 b'debugmanifestfulltextcache',
2219 2220 [
2220 2221 (b'', b'clear', False, _(b'clear the cache')),
2221 2222 (
2222 2223 b'a',
2223 2224 b'add',
2224 2225 [],
2225 2226 _(b'add the given manifest nodes to the cache'),
2226 2227 _(b'NODE'),
2227 2228 ),
2228 2229 ],
2229 2230 b'',
2230 2231 )
2231 2232 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2232 2233 """show, clear or amend the contents of the manifest fulltext cache"""
2233 2234
2234 2235 def getcache():
2235 2236 r = repo.manifestlog.getstorage(b'')
2236 2237 try:
2237 2238 return r._fulltextcache
2238 2239 except AttributeError:
2239 2240 msg = _(
2240 2241 b"Current revlog implementation doesn't appear to have a "
2241 2242 b"manifest fulltext cache\n"
2242 2243 )
2243 2244 raise error.Abort(msg)
2244 2245
2245 2246 if opts.get('clear'):
2246 2247 with repo.wlock():
2247 2248 cache = getcache()
2248 2249 cache.clear(clear_persisted_data=True)
2249 2250 return
2250 2251
2251 2252 if add:
2252 2253 with repo.wlock():
2253 2254 m = repo.manifestlog
2254 2255 store = m.getstorage(b'')
2255 2256 for n in add:
2256 2257 try:
2257 2258 manifest = m[store.lookup(n)]
2258 2259 except error.LookupError as e:
2259 2260 raise error.Abort(
2260 2261 bytes(e), hint=b"Check your manifest node id"
2261 2262 )
2262 2263 manifest.read() # stores revision in cache too
2263 2264 return
2264 2265
2265 2266 cache = getcache()
2266 2267 if not len(cache):
2267 2268 ui.write(_(b'cache empty\n'))
2268 2269 else:
2269 2270 ui.write(
2270 2271 _(
2271 2272 b'cache contains %d manifest entries, in order of most to '
2272 2273 b'least recent:\n'
2273 2274 )
2274 2275 % (len(cache),)
2275 2276 )
2276 2277 totalsize = 0
2277 2278 for nodeid in cache:
2278 2279 # Use cache.peek to avoid updating the LRU order
2279 2280 data = cache.peek(nodeid)
2280 2281 size = len(data)
2281 2282 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2282 2283 ui.write(
2283 2284 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2284 2285 )
2285 2286 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2286 2287 ui.write(
2287 2288 _(b'total cache data size %s, on-disk %s\n')
2288 2289 % (util.bytecount(totalsize), util.bytecount(ondisk))
2289 2290 )
2290 2291
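
# A minimal illustrative sketch of the size accounting above: each
# cache entry is charged 24 bytes of overhead (20-byte nodeid plus a
# 4-byte size field) on top of the manifest text itself.
# `_example_cache_size` is a hypothetical helper.
def _example_cache_size(cache):
    total = 0
    for nodeid in cache:
        # peek() measures the entry without reordering the LRU
        total += len(cache.peek(nodeid)) + 24
    return total
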
2291 2292
2292 2293 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2293 2294 def debugmergestate(ui, repo, *args, **opts):
2294 2295 """print merge state
2295 2296
2296 2297 Use --verbose to print out information about whether v1 or v2 merge state
2297 2298 was chosen."""
2298 2299
2299 2300 if ui.verbose:
2300 2301 ms = mergestatemod.mergestate(repo)
2301 2302
2302 2303 # sort so that reasonable information is on top
2303 2304 v1records = ms._readrecordsv1()
2304 2305 v2records = ms._readrecordsv2()
2305 2306
2306 2307 if not v1records and not v2records:
2307 2308 pass
2308 2309 elif not v2records:
2309 2310 ui.writenoi18n(b'no version 2 merge state\n')
2310 2311 elif ms._v1v2match(v1records, v2records):
2311 2312 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2312 2313 else:
2313 2314 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2314 2315
2315 2316 opts = pycompat.byteskwargs(opts)
2316 2317 if not opts[b'template']:
2317 2318 opts[b'template'] = (
2318 2319 b'{if(commits, "", "no merge state found\n")}'
2319 2320 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2320 2321 b'{files % "file: {path} (state \\"{state}\\")\n'
2321 2322 b'{if(local_path, "'
2322 2323 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2323 2324 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2324 2325 b' other path: {other_path} (node {other_node})\n'
2325 2326 b'")}'
2326 2327 b'{if(rename_side, "'
2327 2328 b' rename side: {rename_side}\n'
2328 2329 b' renamed path: {renamed_path}\n'
2329 2330 b'")}'
2330 2331 b'{extras % " extra: {key} = {value}\n"}'
2331 2332 b'"}'
2332 2333 b'{extras % "extra: {file} ({key} = {value})\n"}'
2333 2334 )
2334 2335
2335 2336 ms = mergestatemod.mergestate.read(repo)
2336 2337
2337 2338 fm = ui.formatter(b'debugmergestate', opts)
2338 2339 fm.startitem()
2339 2340
2340 2341 fm_commits = fm.nested(b'commits')
2341 2342 if ms.active():
2342 2343 for name, node, label_index in (
2343 2344 (b'local', ms.local, 0),
2344 2345 (b'other', ms.other, 1),
2345 2346 ):
2346 2347 fm_commits.startitem()
2347 2348 fm_commits.data(name=name)
2348 2349 fm_commits.data(node=hex(node))
2349 2350 if ms._labels and len(ms._labels) > label_index:
2350 2351 fm_commits.data(label=ms._labels[label_index])
2351 2352 fm_commits.end()
2352 2353
2353 2354 fm_files = fm.nested(b'files')
2354 2355 if ms.active():
2355 2356 for f in ms:
2356 2357 fm_files.startitem()
2357 2358 fm_files.data(path=f)
2358 2359 state = ms._state[f]
2359 2360 fm_files.data(state=state[0])
2360 2361 if state[0] in (
2361 2362 mergestatemod.MERGE_RECORD_UNRESOLVED,
2362 2363 mergestatemod.MERGE_RECORD_RESOLVED,
2363 2364 ):
2364 2365 fm_files.data(local_key=state[1])
2365 2366 fm_files.data(local_path=state[2])
2366 2367 fm_files.data(ancestor_path=state[3])
2367 2368 fm_files.data(ancestor_node=state[4])
2368 2369 fm_files.data(other_path=state[5])
2369 2370 fm_files.data(other_node=state[6])
2370 2371 fm_files.data(local_flags=state[7])
2371 2372 elif state[0] in (
2372 2373 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2373 2374 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2374 2375 ):
2375 2376 fm_files.data(renamed_path=state[1])
2376 2377 fm_files.data(rename_side=state[2])
2377 2378 fm_extras = fm_files.nested(b'extras')
2378 2379 for k, v in sorted(ms.extras(f).items()):
2379 2380 fm_extras.startitem()
2380 2381 fm_extras.data(key=k)
2381 2382 fm_extras.data(value=v)
2382 2383 fm_extras.end()
2383 2384
2384 2385 fm_files.end()
2385 2386
2386 2387 fm_extras = fm.nested(b'extras')
2387 2388 for f, d in sorted(ms.allextras().items()):
2388 2389 if f in ms:
2389 2390 # If the file is in the mergestate, we have already processed its extras
2390 2391 continue
2391 2392 for k, v in d.items():
2392 2393 fm_extras.startitem()
2393 2394 fm_extras.data(file=f)
2394 2395 fm_extras.data(key=k)
2395 2396 fm_extras.data(value=v)
2396 2397 fm_extras.end()
2397 2398
2398 2399 fm.end()
2399 2400
2400 2401
2401 2402 @command(b'debugnamecomplete', [], _(b'NAME...'))
2402 2403 def debugnamecomplete(ui, repo, *args):
2403 2404 '''complete "names" - tags, open branch names, bookmark names'''
2404 2405
2405 2406 names = set()
2406 2407 # since we previously only listed open branches, we will handle that
2407 2408 # specially (after this for loop)
2408 2409 for name, ns in repo.names.items():
2409 2410 if name != b'branches':
2410 2411 names.update(ns.listnames(repo))
2411 2412 names.update(
2412 2413 tag
2413 2414 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2414 2415 if not closed
2415 2416 )
2416 2417 completions = set()
2417 2418 if not args:
2418 2419 args = [b'']
2419 2420 for a in args:
2420 2421 completions.update(n for n in names if n.startswith(a))
2421 2422 ui.write(b'\n'.join(sorted(completions)))
2422 2423 ui.write(b'\n')
2423 2424
2424 2425
2425 2426 @command(
2426 2427 b'debugnodemap',
2427 2428 [
2428 2429 (
2429 2430 b'',
2430 2431 b'dump-new',
2431 2432 False,
2432 2433 _(b'write a (new) persistent binary nodemap on stdout'),
2433 2434 ),
2434 2435 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2435 2436 (
2436 2437 b'',
2437 2438 b'check',
2438 2439 False,
2439 2440 _(b'check that the on-disk data are correct.'),
2440 2441 ),
2441 2442 (
2442 2443 b'',
2443 2444 b'metadata',
2444 2445 False,
2445 2446 _(b'display the on-disk metadata for the nodemap'),
2446 2447 ),
2447 2448 ],
2448 2449 )
2449 2450 def debugnodemap(ui, repo, **opts):
2450 2451 """write and inspect on disk nodemap"""
2451 2452 if opts['dump_new']:
2452 2453 unfi = repo.unfiltered()
2453 2454 cl = unfi.changelog
2454 2455 if util.safehasattr(cl.index, "nodemap_data_all"):
2455 2456 data = cl.index.nodemap_data_all()
2456 2457 else:
2457 2458 data = nodemap.persistent_data(cl.index)
2458 2459 ui.write(data)
2459 2460 elif opts['dump_disk']:
2460 2461 unfi = repo.unfiltered()
2461 2462 cl = unfi.changelog
2462 2463 nm_data = nodemap.persisted_data(cl)
2463 2464 if nm_data is not None:
2464 2465 docket, data = nm_data
2465 2466 ui.write(data[:])
2466 2467 elif opts['check']:
2467 2468 unfi = repo.unfiltered()
2468 2469 cl = unfi.changelog
2469 2470 nm_data = nodemap.persisted_data(cl)
2470 2471 if nm_data is not None:
2471 2472 docket, data = nm_data
2472 2473 return nodemap.check_data(ui, cl.index, data)
2473 2474 elif opts['metadata']:
2474 2475 unfi = repo.unfiltered()
2475 2476 cl = unfi.changelog
2476 2477 nm_data = nodemap.persisted_data(cl)
2477 2478 if nm_data is not None:
2478 2479 docket, data = nm_data
2479 2480 ui.write((b"uid: %s\n") % docket.uid)
2480 2481 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2481 2482 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2482 2483 ui.write((b"data-length: %d\n") % docket.data_length)
2483 2484 ui.write((b"data-unused: %d\n") % docket.data_unused)
2484 2485 unused_perc = docket.data_unused * 100.0 / docket.data_length
2485 2486 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2486 2487
2487 2488
2488 2489 @command(
2489 2490 b'debugobsolete',
2490 2491 [
2491 2492 (b'', b'flags', 0, _(b'markers flag')),
2492 2493 (
2493 2494 b'',
2494 2495 b'record-parents',
2495 2496 False,
2496 2497 _(b'record parent information for the precursor'),
2497 2498 ),
2498 2499 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2499 2500 (
2500 2501 b'',
2501 2502 b'exclusive',
2502 2503 False,
2503 2504 _(b'restrict display to markers only relevant to REV'),
2504 2505 ),
2505 2506 (b'', b'index', False, _(b'display index of the marker')),
2506 2507 (b'', b'delete', [], _(b'delete markers specified by indices')),
2507 2508 ]
2508 2509 + cmdutil.commitopts2
2509 2510 + cmdutil.formatteropts,
2510 2511 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2511 2512 )
2512 2513 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2513 2514 """create arbitrary obsolete marker
2514 2515
2515 2516 With no arguments, displays the list of obsolescence markers."""
2516 2517
2517 2518 opts = pycompat.byteskwargs(opts)
2518 2519
2519 2520 def parsenodeid(s):
2520 2521 try:
2521 2522 # We do not use revsingle/revrange functions here to accept
2522 2523 # arbitrary node identifiers, possibly not present in the
2523 2524 # local repository.
2524 2525 n = bin(s)
2525 2526 if len(n) != repo.nodeconstants.nodelen:
2526 2527 raise TypeError()
2527 2528 return n
2528 2529 except TypeError:
2529 2530 raise error.InputError(
2530 2531 b'changeset references must be full hexadecimal '
2531 2532 b'node identifiers'
2532 2533 )
2533 2534
2534 2535 if opts.get(b'delete'):
2535 2536 indices = []
2536 2537 for v in opts.get(b'delete'):
2537 2538 try:
2538 2539 indices.append(int(v))
2539 2540 except ValueError:
2540 2541 raise error.InputError(
2541 2542 _(b'invalid index value: %r') % v,
2542 2543 hint=_(b'use integers for indices'),
2543 2544 )
2544 2545
2545 2546 if repo.currenttransaction():
2546 2547 raise error.Abort(
2547 2548 _(b'cannot delete obsmarkers in the middle of a transaction.')
2548 2549 )
2549 2550
2550 2551 with repo.lock():
2551 2552 n = repair.deleteobsmarkers(repo.obsstore, indices)
2552 2553 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2553 2554
2554 2555 return
2555 2556
2556 2557 if precursor is not None:
2557 2558 if opts[b'rev']:
2558 2559 raise error.InputError(
2559 2560 b'cannot select revision when creating marker'
2560 2561 )
2561 2562 metadata = {}
2562 2563 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2563 2564 succs = tuple(parsenodeid(succ) for succ in successors)
2564 2565 l = repo.lock()
2565 2566 try:
2566 2567 tr = repo.transaction(b'debugobsolete')
2567 2568 try:
2568 2569 date = opts.get(b'date')
2569 2570 if date:
2570 2571 date = dateutil.parsedate(date)
2571 2572 else:
2572 2573 date = None
2573 2574 prec = parsenodeid(precursor)
2574 2575 parents = None
2575 2576 if opts[b'record_parents']:
2576 2577 if prec not in repo.unfiltered():
2577 2578 raise error.Abort(
2578 2579 b'cannot use --record-parents on '
2579 2580 b'unknown changesets'
2580 2581 )
2581 2582 parents = repo.unfiltered()[prec].parents()
2582 2583 parents = tuple(p.node() for p in parents)
2583 2584 repo.obsstore.create(
2584 2585 tr,
2585 2586 prec,
2586 2587 succs,
2587 2588 opts[b'flags'],
2588 2589 parents=parents,
2589 2590 date=date,
2590 2591 metadata=metadata,
2591 2592 ui=ui,
2592 2593 )
2593 2594 tr.close()
2594 2595 except ValueError as exc:
2595 2596 raise error.Abort(
2596 2597 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2597 2598 )
2598 2599 finally:
2599 2600 tr.release()
2600 2601 finally:
2601 2602 l.release()
2602 2603 else:
2603 2604 if opts[b'rev']:
2604 2605 revs = logcmdutil.revrange(repo, opts[b'rev'])
2605 2606 nodes = [repo[r].node() for r in revs]
2606 2607 markers = list(
2607 2608 obsutil.getmarkers(
2608 2609 repo, nodes=nodes, exclusive=opts[b'exclusive']
2609 2610 )
2610 2611 )
2611 2612 markers.sort(key=lambda x: x._data)
2612 2613 else:
2613 2614 markers = obsutil.getmarkers(repo)
2614 2615
2615 2616 markerstoiter = markers
2616 2617 isrelevant = lambda m: True
2617 2618 if opts.get(b'rev') and opts.get(b'index'):
2618 2619 markerstoiter = obsutil.getmarkers(repo)
2619 2620 markerset = set(markers)
2620 2621 isrelevant = lambda m: m in markerset
2621 2622
2622 2623 fm = ui.formatter(b'debugobsolete', opts)
2623 2624 for i, m in enumerate(markerstoiter):
2624 2625 if not isrelevant(m):
2625 2626 # a marker can be irrelevant when we're iterating over a set
2626 2627 # of markers (markerstoiter) which is bigger than the set
2627 2628 # of markers we want to display (markers).
2628 2629 # this can happen if both --index and --rev options are
2629 2630 # provided, and thus we need to iterate over all of the markers
2630 2631 # to get the correct indices, but only display the ones that
2631 2632 # are relevant to the --rev value
2632 2633 continue
2633 2634 fm.startitem()
2634 2635 ind = i if opts.get(b'index') else None
2635 2636 cmdutil.showmarker(fm, m, index=ind)
2636 2637 fm.end()
2637 2638
2638 2639
2639 2640 @command(
2640 2641 b'debugp1copies',
2641 2642 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2642 2643 _(b'[-r REV]'),
2643 2644 )
2644 2645 def debugp1copies(ui, repo, **opts):
2645 2646 """dump copy information compared to p1"""
2646 2647
2647 2648 opts = pycompat.byteskwargs(opts)
2648 2649 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2649 2650 for dst, src in ctx.p1copies().items():
2650 2651 ui.write(b'%s -> %s\n' % (src, dst))
2651 2652
2652 2653
2653 2654 @command(
2654 2655 b'debugp2copies',
2655 2656 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2656 2657 _(b'[-r REV]'),
2657 2658 )
2658 2659 def debugp2copies(ui, repo, **opts):
2659 2660 """dump copy information compared to p2"""
2660 2661
2661 2662 opts = pycompat.byteskwargs(opts)
2662 2663 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2663 2664 for dst, src in ctx.p2copies().items():
2664 2665 ui.write(b'%s -> %s\n' % (src, dst))
2665 2666
2666 2667
2667 2668 @command(
2668 2669 b'debugpathcomplete',
2669 2670 [
2670 2671 (b'f', b'full', None, _(b'complete an entire path')),
2671 2672 (b'n', b'normal', None, _(b'show only normal files')),
2672 2673 (b'a', b'added', None, _(b'show only added files')),
2673 2674 (b'r', b'removed', None, _(b'show only removed files')),
2674 2675 ],
2675 2676 _(b'FILESPEC...'),
2676 2677 )
2677 2678 def debugpathcomplete(ui, repo, *specs, **opts):
2678 2679 """complete part or all of a tracked path
2679 2680
2680 2681 This command supports shells that offer path name completion. It
2681 2682 currently completes only files already known to the dirstate.
2682 2683
2683 2684 Completion extends only to the next path segment unless
2684 2685 --full is specified, in which case entire paths are used."""
2685 2686
2686 2687 def complete(path, acceptable):
2687 2688 dirstate = repo.dirstate
2688 2689 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2689 2690 rootdir = repo.root + pycompat.ossep
2690 2691 if spec != repo.root and not spec.startswith(rootdir):
2691 2692 return [], []
2692 2693 if os.path.isdir(spec):
2693 2694 spec += b'/'
2694 2695 spec = spec[len(rootdir) :]
2695 2696 fixpaths = pycompat.ossep != b'/'
2696 2697 if fixpaths:
2697 2698 spec = spec.replace(pycompat.ossep, b'/')
2698 2699 speclen = len(spec)
2699 2700 fullpaths = opts['full']
2700 2701 files, dirs = set(), set()
2701 2702 adddir, addfile = dirs.add, files.add
2702 2703 for f, st in dirstate.items():
2703 2704 if f.startswith(spec) and st.state in acceptable:
2704 2705 if fixpaths:
2705 2706 f = f.replace(b'/', pycompat.ossep)
2706 2707 if fullpaths:
2707 2708 addfile(f)
2708 2709 continue
2709 2710 s = f.find(pycompat.ossep, speclen)
2710 2711 if s >= 0:
2711 2712 adddir(f[:s])
2712 2713 else:
2713 2714 addfile(f)
2714 2715 return files, dirs
2715 2716
2716 2717 acceptable = b''
2717 2718 if opts['normal']:
2718 2719 acceptable += b'nm'
2719 2720 if opts['added']:
2720 2721 acceptable += b'a'
2721 2722 if opts['removed']:
2722 2723 acceptable += b'r'
2723 2724 cwd = repo.getcwd()
2724 2725 if not specs:
2725 2726 specs = [b'.']
2726 2727
2727 2728 files, dirs = set(), set()
2728 2729 for spec in specs:
2729 2730 f, d = complete(spec, acceptable or b'nmar')
2730 2731 files.update(f)
2731 2732 dirs.update(d)
2732 2733 files.update(dirs)
2733 2734 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2734 2735 ui.write(b'\n')
2735 2736
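
# A minimal illustrative sketch of the next-segment rule implemented
# in complete() above: unless full paths are requested, completion
# stops at the separator that follows the typed prefix. This assumes
# paths already use b'/' separators; the helper is hypothetical.
def _example_next_segment(f, speclen, fullpaths):
    if fullpaths:
        return f
    s = f.find(b'/', speclen)
    return f[:s] if s >= 0 else f
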
2736 2737
2737 2738 @command(
2738 2739 b'debugpathcopies',
2739 2740 cmdutil.walkopts,
2740 2741 b'hg debugpathcopies REV1 REV2 [FILE]',
2741 2742 inferrepo=True,
2742 2743 )
2743 2744 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2744 2745 """show copies between two revisions"""
2745 2746 ctx1 = scmutil.revsingle(repo, rev1)
2746 2747 ctx2 = scmutil.revsingle(repo, rev2)
2747 2748 m = scmutil.match(ctx1, pats, opts)
2748 2749 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2749 2750 ui.write(b'%s -> %s\n' % (src, dst))
2750 2751
2751 2752
2752 2753 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2753 2754 def debugpeer(ui, path):
2754 2755 """establish a connection to a peer repository"""
2755 2756 # Always enable peer request logging. Requires --debug to display
2756 2757 # though.
2757 2758 overrides = {
2758 2759 (b'devel', b'debug.peer-request'): True,
2759 2760 }
2760 2761
2761 2762 with ui.configoverride(overrides):
2762 2763 peer = hg.peer(ui, {}, path)
2763 2764
2764 2765 try:
2765 2766 local = peer.local() is not None
2766 2767 canpush = peer.canpush()
2767 2768
2768 2769 ui.write(_(b'url: %s\n') % peer.url())
2769 2770 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2770 2771 ui.write(
2771 2772 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2772 2773 )
2773 2774 finally:
2774 2775 peer.close()
2775 2776
2776 2777
2777 2778 @command(
2778 2779 b'debugpickmergetool',
2779 2780 [
2780 2781 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2781 2782 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2782 2783 ]
2783 2784 + cmdutil.walkopts
2784 2785 + cmdutil.mergetoolopts,
2785 2786 _(b'[PATTERN]...'),
2786 2787 inferrepo=True,
2787 2788 )
2788 2789 def debugpickmergetool(ui, repo, *pats, **opts):
2789 2790 """examine which merge tool is chosen for specified file
2790 2791
2791 2792 As described in :hg:`help merge-tools`, Mercurial examines the
2792 2793 configurations below, in this order, to decide which merge tool is
2793 2794 chosen for the specified file.
2794 2795
2795 2796 1. ``--tool`` option
2796 2797 2. ``HGMERGE`` environment variable
2797 2798 3. configurations in ``merge-patterns`` section
2798 2799 4. configuration of ``ui.merge``
2799 2800 5. configurations in ``merge-tools`` section
2800 2801 6. ``hgmerge`` tool (for historical reasons only)
2801 2802 7. default tool for fallback (``:merge`` or ``:prompt``)
2802 2803
2803 2804 This command writes out the examination result in the style below::
2804 2805
2805 2806 FILE = MERGETOOL
2806 2807
2807 2808 By default, all files known in the first parent context of the
2808 2809 working directory are examined. Use file patterns and/or -I/-X
2809 2810 options to limit target files. -r/--rev is also useful to examine
2810 2811 files in another context without actually updating to it.
2811 2812
2812 2813 With --debug, this command also shows the warning messages produced
2813 2814 while matching against ``merge-patterns`` and so on. It is recommended
2814 2815 to use this option with explicit file patterns and/or -I/-X options,
2815 2816 because it increases the amount of output per file according to the
2816 2817 configurations in hgrc.
2817 2818
2818 2819 With -v/--verbose, this command first shows the configurations below
2819 2820 (only if specified).
2820 2821
2821 2822 - ``--tool`` option
2822 2823 - ``HGMERGE`` environment variable
2823 2824 - configuration of ``ui.merge``
2824 2825
2825 2826 If a merge tool is chosen before matching against
2826 2827 ``merge-patterns``, this command can't show any helpful
2827 2828 information, even with --debug. In such a case, the information
2828 2829 above is useful for knowing why that merge tool was chosen.
2829 2830 """
2830 2831 opts = pycompat.byteskwargs(opts)
2831 2832 overrides = {}
2832 2833 if opts[b'tool']:
2833 2834 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2834 2835 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2835 2836
2836 2837 with ui.configoverride(overrides, b'debugmergepatterns'):
2837 2838 hgmerge = encoding.environ.get(b"HGMERGE")
2838 2839 if hgmerge is not None:
2839 2840 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2840 2841 uimerge = ui.config(b"ui", b"merge")
2841 2842 if uimerge:
2842 2843 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2843 2844
2844 2845 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2845 2846 m = scmutil.match(ctx, pats, opts)
2846 2847 changedelete = opts[b'changedelete']
2847 2848 for path in ctx.walk(m):
2848 2849 fctx = ctx[path]
2849 2850 with ui.silent(
2850 2851 error=True
2851 2852 ) if not ui.debugflag else util.nullcontextmanager():
2852 2853 tool, toolpath = filemerge._picktool(
2853 2854 repo,
2854 2855 ui,
2855 2856 path,
2856 2857 fctx.isbinary(),
2857 2858 b'l' in fctx.flags(),
2858 2859 changedelete,
2859 2860 )
2860 2861 ui.write(b'%s = %s\n' % (path, tool))
2861 2862
2862 2863
2863 2864 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2864 2865 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2865 2866 """access the pushkey key/value protocol
2866 2867
2867 2868 With two args, list the keys in the given namespace.
2868 2869
2869 2870 With five args, set a key to new if it currently is set to old.
2870 2871 Reports success or failure.
2871 2872 """
2872 2873
2873 2874 target = hg.peer(ui, {}, repopath)
2874 2875 try:
2875 2876 if keyinfo:
2876 2877 key, old, new = keyinfo
2877 2878 with target.commandexecutor() as e:
2878 2879 r = e.callcommand(
2879 2880 b'pushkey',
2880 2881 {
2881 2882 b'namespace': namespace,
2882 2883 b'key': key,
2883 2884 b'old': old,
2884 2885 b'new': new,
2885 2886 },
2886 2887 ).result()
2887 2888
2888 2889 ui.status(pycompat.bytestr(r) + b'\n')
2889 2890 return not r
2890 2891 else:
2891 2892 for k, v in sorted(target.listkeys(namespace).items()):
2892 2893 ui.write(
2893 2894 b"%s\t%s\n"
2894 2895 % (stringutil.escapestr(k), stringutil.escapestr(v))
2895 2896 )
2896 2897 finally:
2897 2898 target.close()
2898 2899
2899 2900
2900 2901 @command(b'debugpvec', [], _(b'A B'))
2901 2902 def debugpvec(ui, repo, a, b=None):
2902 2903 ca = scmutil.revsingle(repo, a)
2903 2904 cb = scmutil.revsingle(repo, b)
2904 2905 pa = pvec.ctxpvec(ca)
2905 2906 pb = pvec.ctxpvec(cb)
2906 2907 if pa == pb:
2907 2908 rel = b"="
2908 2909 elif pa > pb:
2909 2910 rel = b">"
2910 2911 elif pa < pb:
2911 2912 rel = b"<"
2912 2913 elif pa | pb:
2913 2914 rel = b"|"
2914 2915 ui.write(_(b"a: %s\n") % pa)
2915 2916 ui.write(_(b"b: %s\n") % pb)
2916 2917 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2917 2918 ui.write(
2918 2919 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2919 2920 % (
2920 2921 abs(pa._depth - pb._depth),
2921 2922 pvec._hamming(pa._vec, pb._vec),
2922 2923 pa.distance(pb),
2923 2924 rel,
2924 2925 )
2925 2926 )
2926 2927
2927 2928
2928 2929 @command(
2929 2930 b'debugrebuilddirstate|debugrebuildstate',
2930 2931 [
2931 2932 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2932 2933 (
2933 2934 b'',
2934 2935 b'minimal',
2935 2936 None,
2936 2937 _(
2937 2938 b'only rebuild files that are inconsistent with '
2938 2939 b'the working copy parent'
2939 2940 ),
2940 2941 ),
2941 2942 ],
2942 2943 _(b'[-r REV]'),
2943 2944 )
2944 2945 def debugrebuilddirstate(ui, repo, rev, **opts):
2945 2946 """rebuild the dirstate as it would look like for the given revision
2946 2947
2947 2948 If no revision is specified, the first current parent will be used.
2948 2949
2949 2950 The dirstate will be set to the files of the given revision.
2950 2951 The actual working directory content or existing dirstate
2951 2952 information such as adds or removes is not considered.
2952 2953
2953 2954 ``minimal`` will only rebuild the dirstate status for files that claim to be
2954 2955 tracked but are not in the parent manifest, or that exist in the parent
2955 2956 manifest but are not in the dirstate. It will not change adds, removes, or
2956 2957 modified files that are in the working copy parent.
2957 2958
2958 2959 One use of this command is to make the next :hg:`status` invocation
2959 2960 check the actual file content.
2960 2961 """
2961 2962 ctx = scmutil.revsingle(repo, rev)
2962 2963 with repo.wlock():
2963 2964 dirstate = repo.dirstate
2964 2965 changedfiles = None
2965 2966 # See command doc for what minimal does.
2966 2967 if opts.get('minimal'):
2967 2968 manifestfiles = set(ctx.manifest().keys())
2968 2969 dirstatefiles = set(dirstate)
2969 2970 manifestonly = manifestfiles - dirstatefiles
2970 2971 dsonly = dirstatefiles - manifestfiles
2971 2972 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
2972 2973 changedfiles = manifestonly | dsnotadded
2973 2974
2974 2975 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2975 2976
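
# A minimal illustrative sketch of the --minimal selection above:
# rebuild only files that appear in exactly one of the parent manifest
# and the dirstate, except dirstate-only files recorded as added.
# All argument names are hypothetical and stand for plain sets.
def _example_minimal_set(manifestfiles, dirstatefiles, addedfiles):
    manifestonly = manifestfiles - dirstatefiles
    dsnotadded = (dirstatefiles - manifestfiles) - addedfiles
    return manifestonly | dsnotadded
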
2976 2977
2977 2978 @command(
2978 2979 b'debugrebuildfncache',
2979 2980 [
2980 2981 (
2981 2982 b'',
2982 2983 b'only-data',
2983 2984 False,
2984 2985 _(b'only look for wrong .d files (much faster)'),
2985 2986 )
2986 2987 ],
2987 2988 b'',
2988 2989 )
2989 2990 def debugrebuildfncache(ui, repo, **opts):
2990 2991 """rebuild the fncache file"""
2991 2992 opts = pycompat.byteskwargs(opts)
2992 2993 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
2993 2994
2994 2995
2995 2996 @command(
2996 2997 b'debugrename',
2997 2998 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2998 2999 _(b'[-r REV] [FILE]...'),
2999 3000 )
3000 3001 def debugrename(ui, repo, *pats, **opts):
3001 3002 """dump rename information"""
3002 3003
3003 3004 opts = pycompat.byteskwargs(opts)
3004 3005 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3005 3006 m = scmutil.match(ctx, pats, opts)
3006 3007 for abs in ctx.walk(m):
3007 3008 fctx = ctx[abs]
3008 3009 o = fctx.filelog().renamed(fctx.filenode())
3009 3010 rel = repo.pathto(abs)
3010 3011 if o:
3011 3012 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3012 3013 else:
3013 3014 ui.write(_(b"%s not renamed\n") % rel)
3014 3015
3015 3016
3016 3017 @command(b'debugrequires|debugrequirements', [], b'')
3017 3018 def debugrequirements(ui, repo):
3018 3019 """print the current repo requirements"""
3019 3020 for r in sorted(repo.requirements):
3020 3021 ui.write(b"%s\n" % r)
3021 3022
3022 3023
3023 3024 @command(
3024 3025 b'debugrevlog',
3025 3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3026 3027 _(b'-c|-m|FILE'),
3027 3028 optionalrepo=True,
3028 3029 )
3029 3030 def debugrevlog(ui, repo, file_=None, **opts):
3030 3031 """show data and statistics about a revlog"""
3031 3032 opts = pycompat.byteskwargs(opts)
3032 3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3033 3034
3034 3035 if opts.get(b"dump"):
3035 3036 numrevs = len(r)
3036 3037 ui.write(
3037 3038 (
3038 3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3039 3040 b" rawsize totalsize compression heads chainlen\n"
3040 3041 )
3041 3042 )
3042 3043 ts = 0
3043 3044 heads = set()
3044 3045
3045 3046 for rev in pycompat.xrange(numrevs):
3046 3047 dbase = r.deltaparent(rev)
3047 3048 if dbase == -1:
3048 3049 dbase = rev
3049 3050 cbase = r.chainbase(rev)
3050 3051 clen = r.chainlen(rev)
3051 3052 p1, p2 = r.parentrevs(rev)
3052 3053 rs = r.rawsize(rev)
3053 3054 ts = ts + rs
3054 3055 heads -= set(r.parentrevs(rev))
3055 3056 heads.add(rev)
3056 3057 try:
3057 3058 compression = ts / r.end(rev)
3058 3059 except ZeroDivisionError:
3059 3060 compression = 0
3060 3061 ui.write(
3061 3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3062 3063 b"%11d %5d %8d\n"
3063 3064 % (
3064 3065 rev,
3065 3066 p1,
3066 3067 p2,
3067 3068 r.start(rev),
3068 3069 r.end(rev),
3069 3070 r.start(dbase),
3070 3071 r.start(cbase),
3071 3072 r.start(p1),
3072 3073 r.start(p2),
3073 3074 rs,
3074 3075 ts,
3075 3076 compression,
3076 3077 len(heads),
3077 3078 clen,
3078 3079 )
3079 3080 )
3080 3081 return 0
3081 3082
3082 3083 format = r._format_version
3083 3084 v = r._format_flags
3084 3085 flags = []
3085 3086 gdelta = False
3086 3087 if v & revlog.FLAG_INLINE_DATA:
3087 3088 flags.append(b'inline')
3088 3089 if v & revlog.FLAG_GENERALDELTA:
3089 3090 gdelta = True
3090 3091 flags.append(b'generaldelta')
3091 3092 if not flags:
3092 3093 flags = [b'(none)']
3093 3094
3094 3095 ### tracks merge vs single parent
3095 3096 nummerges = 0
3096 3097
3097 3098 ### tracks the ways the deltas are built
3098 3099 # nodelta
3099 3100 numempty = 0
3100 3101 numemptytext = 0
3101 3102 numemptydelta = 0
3102 3103 # full file content
3103 3104 numfull = 0
3104 3105 # intermediate snapshot against a prior snapshot
3105 3106 numsemi = 0
3106 3107 # snapshot count per depth
3107 3108 numsnapdepth = collections.defaultdict(lambda: 0)
3108 3109 # delta against previous revision
3109 3110 numprev = 0
3110 3111 # delta against first or second parent (not prev)
3111 3112 nump1 = 0
3112 3113 nump2 = 0
3113 3114 # delta against neither prev nor parents
3114 3115 numother = 0
3115 3116 # delta against prev that are also first or second parent
3116 3117 # (details of `numprev`)
3117 3118 nump1prev = 0
3118 3119 nump2prev = 0
3119 3120
3120 3121 # data about delta chain of each revs
3121 3122 chainlengths = []
3122 3123 chainbases = []
3123 3124 chainspans = []
3124 3125
3125 3126 # data about each revision
3126 3127 datasize = [None, 0, 0]
3127 3128 fullsize = [None, 0, 0]
3128 3129 semisize = [None, 0, 0]
3129 3130 # snapshot count per depth
3130 3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3131 3132 deltasize = [None, 0, 0]
3132 3133 chunktypecounts = {}
3133 3134 chunktypesizes = {}
3134 3135
3135 3136 def addsize(size, l):
3136 3137 if l[0] is None or size < l[0]:
3137 3138 l[0] = size
3138 3139 if size > l[1]:
3139 3140 l[1] = size
3140 3141 l[2] += size
3141 3142
3142 3143 numrevs = len(r)
3143 3144 for rev in pycompat.xrange(numrevs):
3144 3145 p1, p2 = r.parentrevs(rev)
3145 3146 delta = r.deltaparent(rev)
3146 3147 if format > 0:
3147 3148 addsize(r.rawsize(rev), datasize)
3148 3149 if p2 != nullrev:
3149 3150 nummerges += 1
3150 3151 size = r.length(rev)
3151 3152 if delta == nullrev:
3152 3153 chainlengths.append(0)
3153 3154 chainbases.append(r.start(rev))
3154 3155 chainspans.append(size)
3155 3156 if size == 0:
3156 3157 numempty += 1
3157 3158 numemptytext += 1
3158 3159 else:
3159 3160 numfull += 1
3160 3161 numsnapdepth[0] += 1
3161 3162 addsize(size, fullsize)
3162 3163 addsize(size, snapsizedepth[0])
3163 3164 else:
3164 3165 chainlengths.append(chainlengths[delta] + 1)
3165 3166 baseaddr = chainbases[delta]
3166 3167 revaddr = r.start(rev)
3167 3168 chainbases.append(baseaddr)
3168 3169 chainspans.append((revaddr - baseaddr) + size)
3169 3170 if size == 0:
3170 3171 numempty += 1
3171 3172 numemptydelta += 1
3172 3173 elif r.issnapshot(rev):
3173 3174 addsize(size, semisize)
3174 3175 numsemi += 1
3175 3176 depth = r.snapshotdepth(rev)
3176 3177 numsnapdepth[depth] += 1
3177 3178 addsize(size, snapsizedepth[depth])
3178 3179 else:
3179 3180 addsize(size, deltasize)
3180 3181 if delta == rev - 1:
3181 3182 numprev += 1
3182 3183 if delta == p1:
3183 3184 nump1prev += 1
3184 3185 elif delta == p2:
3185 3186 nump2prev += 1
3186 3187 elif delta == p1:
3187 3188 nump1 += 1
3188 3189 elif delta == p2:
3189 3190 nump2 += 1
3190 3191 elif delta != nullrev:
3191 3192 numother += 1
3192 3193
3193 3194 # Obtain data on the raw chunks in the revlog.
3194 3195 if util.safehasattr(r, '_getsegmentforrevs'):
3195 3196 segment = r._getsegmentforrevs(rev, rev)[1]
3196 3197 else:
3197 3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3198 3199 if segment:
3199 3200 chunktype = bytes(segment[0:1])
3200 3201 else:
3201 3202 chunktype = b'empty'
3202 3203
3203 3204 if chunktype not in chunktypecounts:
3204 3205 chunktypecounts[chunktype] = 0
3205 3206 chunktypesizes[chunktype] = 0
3206 3207
3207 3208 chunktypecounts[chunktype] += 1
3208 3209 chunktypesizes[chunktype] += size
3209 3210
3210 3211 # Adjust size min value for empty cases
3211 3212 for size in (datasize, fullsize, semisize, deltasize):
3212 3213 if size[0] is None:
3213 3214 size[0] = 0
3214 3215
3215 3216 numdeltas = numrevs - numfull - numempty - numsemi
3216 3217 numoprev = numprev - nump1prev - nump2prev
3217 3218 totalrawsize = datasize[2]
3218 3219 datasize[2] /= numrevs
3219 3220 fulltotal = fullsize[2]
3220 3221 if numfull == 0:
3221 3222 fullsize[2] = 0
3222 3223 else:
3223 3224 fullsize[2] /= numfull
3224 3225 semitotal = semisize[2]
3225 3226 snaptotal = {}
3226 3227 if numsemi > 0:
3227 3228 semisize[2] /= numsemi
3228 3229 for depth in snapsizedepth:
3229 3230 snaptotal[depth] = snapsizedepth[depth][2]
3230 3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3231 3232
3232 3233 deltatotal = deltasize[2]
3233 3234 if numdeltas > 0:
3234 3235 deltasize[2] /= numdeltas
3235 3236 totalsize = fulltotal + semitotal + deltatotal
3236 3237 avgchainlen = sum(chainlengths) / numrevs
3237 3238 maxchainlen = max(chainlengths)
3238 3239 maxchainspan = max(chainspans)
3239 3240 compratio = 1
3240 3241 if totalsize:
3241 3242 compratio = totalrawsize / totalsize
3242 3243
3243 3244 basedfmtstr = b'%%%dd\n'
3244 3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3245 3246
3246 3247 def dfmtstr(max):
3247 3248 return basedfmtstr % len(str(max))
3248 3249
3249 3250 def pcfmtstr(max, padding=0):
3250 3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3251 3252
3252 3253 def pcfmt(value, total):
3253 3254 if total:
3254 3255 return (value, 100 * float(value) / total)
3255 3256 else:
3256 3257 return value, 100.0
3257 3258
3258 3259 ui.writenoi18n(b'format : %d\n' % format)
3259 3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3260 3261
3261 3262 ui.write(b'\n')
3262 3263 fmt = pcfmtstr(totalsize)
3263 3264 fmt2 = dfmtstr(totalsize)
3264 3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3265 3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3266 3267 ui.writenoi18n(
3267 3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3268 3269 )
3269 3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3270 3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3271 3272 ui.writenoi18n(
3272 3273 b' text : '
3273 3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3274 3275 )
3275 3276 ui.writenoi18n(
3276 3277 b' delta : '
3277 3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3278 3279 )
3279 3280 ui.writenoi18n(
3280 3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3281 3282 )
3282 3283 for depth in sorted(numsnapdepth):
3283 3284 ui.write(
3284 3285 (b' lvl-%-3d : ' % depth)
3285 3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3286 3287 )
3287 3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3288 3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3289 3290 ui.writenoi18n(
3290 3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3291 3292 )
3292 3293 for depth in sorted(numsnapdepth):
3293 3294 ui.write(
3294 3295 (b' lvl-%-3d : ' % depth)
3295 3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3296 3297 )
3297 3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3298 3299
3299 3300 def fmtchunktype(chunktype):
3300 3301 if chunktype == b'empty':
3301 3302 return b' %s : ' % chunktype
3302 3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3303 3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3304 3305 else:
3305 3306 return b' 0x%s : ' % hex(chunktype)
3306 3307
3307 3308 ui.write(b'\n')
3308 3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3309 3310 for chunktype in sorted(chunktypecounts):
3310 3311 ui.write(fmtchunktype(chunktype))
3311 3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3312 3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3313 3314 for chunktype in sorted(chunktypecounts):
3314 3315 ui.write(fmtchunktype(chunktype))
3315 3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3316 3317
3317 3318 ui.write(b'\n')
3318 3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3319 3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3320 3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3321 3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3322 3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3323 3324
3324 3325 if format > 0:
3325 3326 ui.write(b'\n')
3326 3327 ui.writenoi18n(
3327 3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3328 3329 % tuple(datasize)
3329 3330 )
3330 3331 ui.writenoi18n(
3331 3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3332 3333 % tuple(fullsize)
3333 3334 )
3334 3335 ui.writenoi18n(
3335 3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3336 3337 % tuple(semisize)
3337 3338 )
3338 3339 for depth in sorted(snapsizedepth):
3339 3340 if depth == 0:
3340 3341 continue
3341 3342 ui.writenoi18n(
3342 3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3343 3344 % ((depth,) + tuple(snapsizedepth[depth]))
3344 3345 )
3345 3346 ui.writenoi18n(
3346 3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3347 3348 % tuple(deltasize)
3348 3349 )
3349 3350
3350 3351 if numdeltas > 0:
3351 3352 ui.write(b'\n')
3352 3353 fmt = pcfmtstr(numdeltas)
3353 3354 fmt2 = pcfmtstr(numdeltas, 4)
3354 3355 ui.writenoi18n(
3355 3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3356 3357 )
3357 3358 if numprev > 0:
3358 3359 ui.writenoi18n(
3359 3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3360 3361 )
3361 3362 ui.writenoi18n(
3362 3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3363 3364 )
3364 3365 ui.writenoi18n(
3365 3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3366 3367 )
3367 3368 if gdelta:
3368 3369 ui.writenoi18n(
3369 3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3370 3371 )
3371 3372 ui.writenoi18n(
3372 3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3373 3374 )
3374 3375 ui.writenoi18n(
3375 3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3376 3377 )
3377 3378
3378 3379
3379 3380 @command(
3380 3381 b'debugrevlogindex',
3381 3382 cmdutil.debugrevlogopts
3382 3383 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3383 3384 _(b'[-f FORMAT] -c|-m|FILE'),
3384 3385 optionalrepo=True,
3385 3386 )
3386 3387 def debugrevlogindex(ui, repo, file_=None, **opts):
3387 3388 """dump the contents of a revlog index"""
3388 3389 opts = pycompat.byteskwargs(opts)
3389 3390 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3390 3391 format = opts.get(b'format', 0)
3391 3392 if format not in (0, 1):
3392 3393 raise error.Abort(_(b"unknown format %d") % format)
3393 3394
3394 3395 if ui.debugflag:
3395 3396 shortfn = hex
3396 3397 else:
3397 3398 shortfn = short
3398 3399
3399 3400 # There might not be anything in r, so have a sane default
3400 3401 idlen = 12
3401 3402 for i in r:
3402 3403 idlen = len(shortfn(r.node(i)))
3403 3404 break
3404 3405
3405 3406 if format == 0:
3406 3407 if ui.verbose:
3407 3408 ui.writenoi18n(
3408 3409 b" rev offset length linkrev %s %s p2\n"
3409 3410 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3410 3411 )
3411 3412 else:
3412 3413 ui.writenoi18n(
3413 3414 b" rev linkrev %s %s p2\n"
3414 3415 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3415 3416 )
3416 3417 elif format == 1:
3417 3418 if ui.verbose:
3418 3419 ui.writenoi18n(
3419 3420 (
3420 3421 b" rev flag offset length size link p1"
3421 3422 b" p2 %s\n"
3422 3423 )
3423 3424 % b"nodeid".rjust(idlen)
3424 3425 )
3425 3426 else:
3426 3427 ui.writenoi18n(
3427 3428 b" rev flag size link p1 p2 %s\n"
3428 3429 % b"nodeid".rjust(idlen)
3429 3430 )
3430 3431
3431 3432 for i in r:
3432 3433 node = r.node(i)
3433 3434 if format == 0:
3434 3435 try:
3435 3436 pp = r.parents(node)
3436 3437 except Exception:
3437 3438 pp = [repo.nullid, repo.nullid]
3438 3439 if ui.verbose:
3439 3440 ui.write(
3440 3441 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3441 3442 % (
3442 3443 i,
3443 3444 r.start(i),
3444 3445 r.length(i),
3445 3446 r.linkrev(i),
3446 3447 shortfn(node),
3447 3448 shortfn(pp[0]),
3448 3449 shortfn(pp[1]),
3449 3450 )
3450 3451 )
3451 3452 else:
3452 3453 ui.write(
3453 3454 b"% 6d % 7d %s %s %s\n"
3454 3455 % (
3455 3456 i,
3456 3457 r.linkrev(i),
3457 3458 shortfn(node),
3458 3459 shortfn(pp[0]),
3459 3460 shortfn(pp[1]),
3460 3461 )
3461 3462 )
3462 3463 elif format == 1:
3463 3464 pr = r.parentrevs(i)
3464 3465 if ui.verbose:
3465 3466 ui.write(
3466 3467 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3467 3468 % (
3468 3469 i,
3469 3470 r.flags(i),
3470 3471 r.start(i),
3471 3472 r.length(i),
3472 3473 r.rawsize(i),
3473 3474 r.linkrev(i),
3474 3475 pr[0],
3475 3476 pr[1],
3476 3477 shortfn(node),
3477 3478 )
3478 3479 )
3479 3480 else:
3480 3481 ui.write(
3481 3482 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3482 3483 % (
3483 3484 i,
3484 3485 r.flags(i),
3485 3486 r.rawsize(i),
3486 3487 r.linkrev(i),
3487 3488 pr[0],
3488 3489 pr[1],
3489 3490 shortfn(node),
3490 3491 )
3491 3492 )
3492 3493
3493 3494
3494 3495 @command(
3495 3496 b'debugrevspec',
3496 3497 [
3497 3498 (
3498 3499 b'',
3499 3500 b'optimize',
3500 3501 None,
3501 3502 _(b'print parsed tree after optimizing (DEPRECATED)'),
3502 3503 ),
3503 3504 (
3504 3505 b'',
3505 3506 b'show-revs',
3506 3507 True,
3507 3508 _(b'print list of result revisions (default)'),
3508 3509 ),
3509 3510 (
3510 3511 b's',
3511 3512 b'show-set',
3512 3513 None,
3513 3514 _(b'print internal representation of result set'),
3514 3515 ),
3515 3516 (
3516 3517 b'p',
3517 3518 b'show-stage',
3518 3519 [],
3519 3520 _(b'print parsed tree at the given stage'),
3520 3521 _(b'NAME'),
3521 3522 ),
3522 3523 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3523 3524 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3524 3525 ],
3525 3526 b'REVSPEC',
3526 3527 )
3527 3528 def debugrevspec(ui, repo, expr, **opts):
3528 3529 """parse and apply a revision specification
3529 3530
3530 3531 Use -p/--show-stage option to print the parsed tree at the given stages.
3531 3532 Use -p all to print tree at every stage.
3532 3533
3533 3534 Use --no-show-revs option with -s or -p to print only the set
3534 3535 representation or the parsed tree respectively.
3535 3536
3536 3537 Use --verify-optimized to compare the optimized result with the unoptimized
3537 3538 one. Returns 1 if the optimized result differs.
3538 3539 """
3539 3540 opts = pycompat.byteskwargs(opts)
3540 3541 aliases = ui.configitems(b'revsetalias')
3541 3542 stages = [
3542 3543 (b'parsed', lambda tree: tree),
3543 3544 (
3544 3545 b'expanded',
3545 3546 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3546 3547 ),
3547 3548 (b'concatenated', revsetlang.foldconcat),
3548 3549 (b'analyzed', revsetlang.analyze),
3549 3550 (b'optimized', revsetlang.optimize),
3550 3551 ]
3551 3552 if opts[b'no_optimized']:
3552 3553 stages = stages[:-1]
3553 3554 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3554 3555 raise error.Abort(
3555 3556 _(b'cannot use --verify-optimized with --no-optimized')
3556 3557 )
3557 3558 stagenames = {n for n, f in stages}
3558 3559
3559 3560 showalways = set()
3560 3561 showchanged = set()
3561 3562 if ui.verbose and not opts[b'show_stage']:
3562 3563 # show parsed tree by --verbose (deprecated)
3563 3564 showalways.add(b'parsed')
3564 3565 showchanged.update([b'expanded', b'concatenated'])
3565 3566 if opts[b'optimize']:
3566 3567 showalways.add(b'optimized')
3567 3568 if opts[b'show_stage'] and opts[b'optimize']:
3568 3569 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3569 3570 if opts[b'show_stage'] == [b'all']:
3570 3571 showalways.update(stagenames)
3571 3572 else:
3572 3573 for n in opts[b'show_stage']:
3573 3574 if n not in stagenames:
3574 3575 raise error.Abort(_(b'invalid stage name: %s') % n)
3575 3576 showalways.update(opts[b'show_stage'])
3576 3577
3577 3578 treebystage = {}
3578 3579 printedtree = None
3579 3580 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3580 3581 for n, f in stages:
3581 3582 treebystage[n] = tree = f(tree)
3582 3583 if n in showalways or (n in showchanged and tree != printedtree):
3583 3584 if opts[b'show_stage'] or n != b'parsed':
3584 3585 ui.write(b"* %s:\n" % n)
3585 3586 ui.write(revsetlang.prettyformat(tree), b"\n")
3586 3587 printedtree = tree
3587 3588
3588 3589 if opts[b'verify_optimized']:
3589 3590 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3590 3591 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3591 3592 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3592 3593 ui.writenoi18n(
3593 3594 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3594 3595 )
3595 3596 ui.writenoi18n(
3596 3597 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3597 3598 )
3598 3599 arevs = list(arevs)
3599 3600 brevs = list(brevs)
3600 3601 if arevs == brevs:
3601 3602 return 0
3602 3603 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3603 3604 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3604 3605 sm = difflib.SequenceMatcher(None, arevs, brevs)
3605 3606 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3606 3607 if tag in ('delete', 'replace'):
3607 3608 for c in arevs[alo:ahi]:
3608 3609 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3609 3610 if tag in ('insert', 'replace'):
3610 3611 for c in brevs[blo:bhi]:
3611 3612 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3612 3613 if tag == 'equal':
3613 3614 for c in arevs[alo:ahi]:
3614 3615 ui.write(b' %d\n' % c)
3615 3616 return 1
3616 3617
3617 3618 func = revset.makematcher(tree)
3618 3619 revs = func(repo)
3619 3620 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3620 3621 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3621 3622 if not opts[b'show_revs']:
3622 3623 return
3623 3624 for c in revs:
3624 3625 ui.write(b"%d\n" % c)
3625 3626
3626 3627
3627 3628 @command(
3628 3629 b'debugserve',
3629 3630 [
3630 3631 (
3631 3632 b'',
3632 3633 b'sshstdio',
3633 3634 False,
3634 3635 _(b'run an SSH server bound to process handles'),
3635 3636 ),
3636 3637 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3637 3638 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3638 3639 ],
3639 3640 b'',
3640 3641 )
3641 3642 def debugserve(ui, repo, **opts):
3642 3643 """run a server with advanced settings
3643 3644
3644 3645 This command is similar to :hg:`serve`. It exists partially as a
3645 3646 workaround to the fact that ``hg serve --stdio`` must have specific
3646 3647 arguments for security reasons.
3647 3648 """
3648 3649 opts = pycompat.byteskwargs(opts)
3649 3650
3650 3651 if not opts[b'sshstdio']:
3651 3652 raise error.Abort(_(b'only --sshstdio is currently supported'))
3652 3653
3653 3654 logfh = None
3654 3655
3655 3656 if opts[b'logiofd'] and opts[b'logiofile']:
3656 3657 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3657 3658
3658 3659 if opts[b'logiofd']:
3659 3660 # Ideally we would be line buffered. But line buffering in binary
3660 3661 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3661 3662 # buffering could have performance impacts. But since this isn't
3662 3663 # performance critical code, it should be fine.
3663 3664 try:
3664 3665 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3665 3666 except OSError as e:
3666 3667 if e.errno != errno.ESPIPE:
3667 3668 raise
3668 3669 # can't seek a pipe, so `ab` mode fails on py3
3669 3670 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3670 3671 elif opts[b'logiofile']:
3671 3672 logfh = open(opts[b'logiofile'], b'ab', 0)
3672 3673
3673 3674 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3674 3675 s.serve_forever()
3675 3676
3676 3677
3677 3678 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3678 3679 def debugsetparents(ui, repo, rev1, rev2=None):
3679 3680 """manually set the parents of the current working directory (DANGEROUS)
3680 3681
3681 3682 This command is not what you are looking for and should not be used. Using
3682 3683 this command will most certainly result in slight corruption of the file
3683 3684 level histories within your repository. DO NOT USE THIS COMMAND.
3684 3685
3685 3686 The command updates the p1 and p2 fields in the dirstate, and does not touch
3686 3687 anything else. This is useful for writing repository conversion tools, but
3687 3688 should be used with extreme care. For example, neither the working directory
3688 3689 contents nor the rest of the dirstate is updated, so file status may be
3689 3690 incorrect after running this command. Only use it if you are one of the few
3690 3691 people who deeply understand both conversion tools and file level histories.
3691 3692 If you are reading this help, you are not one of those people (most of them
3692 3693 sailed west from Mithlond anyway).
3693 3694
3694 3695 So one last time DO NOT USE THIS COMMAND.
3695 3696
3696 3697 Returns 0 on success.
3697 3698 """
3698 3699
3699 3700 node1 = scmutil.revsingle(repo, rev1).node()
3700 3701 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3701 3702
3702 3703 with repo.wlock():
3703 3704 repo.setparents(node1, node2)
3704 3705
3705 3706
3706 3707 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3707 3708 def debugsidedata(ui, repo, file_, rev=None, **opts):
3708 3709 """dump the side data for a cl/manifest/file revision
3709 3710
3710 3711 Use --verbose to dump the sidedata content."""
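# For example (a sketch; assumes changelog revision 0 exists and carries
# sidedata), listing the entries and then also dumping their content:
#
#   $ hg debugsidedata -c 0
#   $ hg debugsidedata -v -c 0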
3711 3712 opts = pycompat.byteskwargs(opts)
3712 3713 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3713 3714 if rev is not None:
3714 3715 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3715 3716 file_, rev = None, file_
3716 3717 elif rev is None:
3717 3718 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3718 3719 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3719 3720 r = getattr(r, '_revlog', r)
3720 3721 try:
3721 3722 sidedata = r.sidedata(r.lookup(rev))
3722 3723 except KeyError:
3723 3724 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3724 3725 if sidedata:
3725 3726 sidedata = list(sidedata.items())
3726 3727 sidedata.sort()
3727 3728 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3728 3729 for key, value in sidedata:
3729 3730 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3730 3731 if ui.verbose:
3731 3732 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3732 3733
3733 3734
3734 3735 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3735 3736 def debugssl(ui, repo, source=None, **opts):
3736 3737 """test a secure connection to a server
3737 3738
3738 3739 This builds the certificate chain for the server on Windows, installing the
3739 3740 missing intermediates and trusted root via Windows Update if necessary. It
3740 3741 does nothing on other platforms.
3741 3742
3742 3743 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3743 3744 that server is used. See :hg:`help urls` for more information.
3744 3745
3745 3746 If the update succeeds, retry the original operation. Otherwise, the cause
3746 3747 of the SSL error is likely another issue.
3747 3748 """
3748 3749 if not pycompat.iswindows:
3749 3750 raise error.Abort(
3750 3751 _(b'certificate chain building is only possible on Windows')
3751 3752 )
3752 3753
3753 3754 if not source:
3754 3755 if not repo:
3755 3756 raise error.Abort(
3756 3757 _(
3757 3758 b"there is no Mercurial repository here, and no "
3758 3759 b"server specified"
3759 3760 )
3760 3761 )
3761 3762 source = b"default"
3762 3763
3763 3764 source, branches = urlutil.get_unique_pull_path(
3764 3765 b'debugssl', repo, ui, source
3765 3766 )
3766 3767 url = urlutil.url(source)
3767 3768
3768 3769 defaultport = {b'https': 443, b'ssh': 22}
3769 3770 if url.scheme in defaultport:
3770 3771 try:
3771 3772 addr = (url.host, int(url.port or defaultport[url.scheme]))
3772 3773 except ValueError:
3773 3774 raise error.Abort(_(b"malformed port number in URL"))
3774 3775 else:
3775 3776 raise error.Abort(_(b"only https and ssh connections are supported"))
3776 3777
3777 3778 from . import win32
3778 3779
3779 3780 s = ssl.wrap_socket(
3780 3781 socket.socket(),
3781 3782 ssl_version=ssl.PROTOCOL_TLS,
3782 3783 cert_reqs=ssl.CERT_NONE,
3783 3784 ca_certs=None,
3784 3785 )
3785 3786
3786 3787 try:
3787 3788 s.connect(addr)
3788 3789 cert = s.getpeercert(True)
3789 3790
3790 3791 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3791 3792
3792 3793 complete = win32.checkcertificatechain(cert, build=False)
3793 3794
3794 3795 if not complete:
3795 3796 ui.status(_(b'certificate chain is incomplete, updating... '))
3796 3797
3797 3798 if not win32.checkcertificatechain(cert):
3798 3799 ui.status(_(b'failed.\n'))
3799 3800 else:
3800 3801 ui.status(_(b'done.\n'))
3801 3802 else:
3802 3803 ui.status(_(b'full certificate chain is available\n'))
3803 3804 finally:
3804 3805 s.close()
3805 3806
3806 3807
3807 3808 @command(
3808 3809 b"debugbackupbundle",
3809 3810 [
3810 3811 (
3811 3812 b"",
3812 3813 b"recover",
3813 3814 b"",
3814 3815 b"brings the specified changeset back into the repository",
3815 3816 )
3816 3817 ]
3817 3818 + cmdutil.logopts,
3818 3819 _(b"hg debugbackupbundle [--recover HASH]"),
3819 3820 )
3820 3821 def debugbackupbundle(ui, repo, *pats, **opts):
3821 3822 """lists the changesets available in backup bundles
3822 3823
3823 3824 Without any arguments, this command prints a list of the changesets in each
3824 3825 backup bundle.
3825 3826
3826 3827 --recover takes a changeset hash and unbundles the first bundle that
3827 3828 contains that hash, which puts that changeset back in your repository.
3828 3829
3829 3830 --verbose will print the entire commit message and the bundle path for that
3830 3831 backup.
3831 3832 """
3832 3833 backups = list(
3833 3834 filter(
3834 3835 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3835 3836 )
3836 3837 )
3837 3838 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3838 3839
3839 3840 opts = pycompat.byteskwargs(opts)
3840 3841 opts[b"bundle"] = b""
3841 3842 opts[b"force"] = None
3842 3843 limit = logcmdutil.getlimit(opts)
3843 3844
3844 3845 def display(other, chlist, displayer):
3845 3846 if opts.get(b"newest_first"):
3846 3847 chlist.reverse()
3847 3848 count = 0
3848 3849 for n in chlist:
3849 3850 if limit is not None and count >= limit:
3850 3851 break
3851 3852 parents = [
3852 3853 True for p in other.changelog.parents(n) if p != repo.nullid
3853 3854 ]
3854 3855 if opts.get(b"no_merges") and len(parents) == 2:
3855 3856 continue
3856 3857 count += 1
3857 3858 displayer.show(other[n])
3858 3859
3859 3860 recovernode = opts.get(b"recover")
3860 3861 if recovernode:
3861 3862 if scmutil.isrevsymbol(repo, recovernode):
3862 3863 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3863 3864 return
3864 3865 elif backups:
3865 3866 msg = _(
3866 3867 b"Recover changesets using: hg debugbackupbundle --recover "
3867 3868 b"<changeset hash>\n\nAvailable backup changesets:"
3868 3869 )
3869 3870 ui.status(msg, label=b"status.removed")
3870 3871 else:
3871 3872 ui.status(_(b"no backup changesets found\n"))
3872 3873 return
3873 3874
3874 3875 for backup in backups:
3875 3876 # Much of this is copied from the hg incoming logic
3876 3877 source = os.path.relpath(backup, encoding.getcwd())
3877 3878 source, branches = urlutil.get_unique_pull_path(
3878 3879 b'debugbackupbundle',
3879 3880 repo,
3880 3881 ui,
3881 3882 source,
3882 3883 default_branches=opts.get(b'branch'),
3883 3884 )
3884 3885 try:
3885 3886 other = hg.peer(repo, opts, source)
3886 3887 except error.LookupError as ex:
3887 3888 msg = _(b"\nwarning: unable to open bundle %s") % source
3888 3889 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3889 3890 ui.warn(msg, hint=hint)
3890 3891 continue
3891 3892 revs, checkout = hg.addbranchrevs(
3892 3893 repo, other, branches, opts.get(b"rev")
3893 3894 )
3894 3895
3895 3896 if revs:
3896 3897 revs = [other.lookup(rev) for rev in revs]
3897 3898
3898 3899 with ui.silent():
3899 3900 try:
3900 3901 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3901 3902 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3902 3903 )
3903 3904 except error.LookupError:
3904 3905 continue
3905 3906
3906 3907 try:
3907 3908 if not chlist:
3908 3909 continue
3909 3910 if recovernode:
3910 3911 with repo.lock(), repo.transaction(b"unbundle") as tr:
3911 3912 if scmutil.isrevsymbol(other, recovernode):
3912 3913 ui.status(_(b"Unbundling %s\n") % (recovernode))
3913 3914 f = hg.openpath(ui, source)
3914 3915 gen = exchange.readbundle(ui, f, source)
3915 3916 if isinstance(gen, bundle2.unbundle20):
3916 3917 bundle2.applybundle(
3917 3918 repo,
3918 3919 gen,
3919 3920 tr,
3920 3921 source=b"unbundle",
3921 3922 url=b"bundle:" + source,
3922 3923 )
3923 3924 else:
3924 3925 gen.apply(repo, b"unbundle", b"bundle:" + source)
3925 3926 break
3926 3927 else:
3927 3928 backupdate = encoding.strtolocal(
3928 3929 time.strftime(
3929 3930 "%a %H:%M, %Y-%m-%d",
3930 3931 time.localtime(os.path.getmtime(source)),
3931 3932 )
3932 3933 )
3933 3934 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3934 3935 if ui.verbose:
3935 3936 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3936 3937 else:
3937 3938 opts[
3938 3939 b"template"
3939 3940 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3940 3941 displayer = logcmdutil.changesetdisplayer(
3941 3942 ui, other, opts, False
3942 3943 )
3943 3944 display(other, chlist, displayer)
3944 3945 displayer.close()
3945 3946 finally:
3946 3947 cleanupfn()
3947 3948
3948 3949
3949 3950 @command(
3950 3951 b'debugsub',
3951 3952 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3952 3953 _(b'[-r REV] [REV]'),
3953 3954 )
3954 3955 def debugsub(ui, repo, rev=None):
3955 3956 ctx = scmutil.revsingle(repo, rev, None)
3956 3957 for k, v in sorted(ctx.substate.items()):
3957 3958 ui.writenoi18n(b'path %s\n' % k)
3958 3959 ui.writenoi18n(b' source %s\n' % v[0])
3959 3960 ui.writenoi18n(b' revision %s\n' % v[1])
3960 3961
3961 3962
3962 3963 @command(b'debugshell', optionalrepo=True)
3963 3964 def debugshell(ui, repo):
3964 3965 """run an interactive Python interpreter
3965 3966
3966 3967 The local namespace is provided with a reference to the ui and
3967 3968 the repo instance (if available).
3968 3969 """
3969 3970 import code
3970 3971
3971 3972 imported_objects = {
3972 3973 'ui': ui,
3973 3974 'repo': repo,
3974 3975 }
3975 3976
3976 3977 code.interact(local=imported_objects)
3977 3978
3978 3979
3979 3980 @command(
3980 3981 b'debugsuccessorssets',
3981 3982 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3982 3983 _(b'[REV]'),
3983 3984 )
3984 3985 def debugsuccessorssets(ui, repo, *revs, **opts):
3985 3986 """show set of successors for revision
3986 3987
3987 3988 A successors set of changeset A is a consistent group of revisions that
3988 3989 succeed A. It contains non-obsolete changesets only unless the
3989 3990 --closest option is set.
3990 3991
3991 3992 In most cases a changeset A has a single successors set containing a single
3992 3993 successor (changeset A replaced by A').
3993 3994
3994 3995 A changeset that is made obsolete with no successors is called "pruned".
3995 3996 Such changesets have no successors sets at all.
3996 3997
3997 3998 A changeset that has been "split" will have a successors set containing
3998 3999 more than one successor.
3999 4000
4000 4001 A changeset that has been rewritten in multiple different ways is called
4001 4002 "divergent". Such changesets have multiple successor sets (each of which
4002 4003 may also be split, i.e. have multiple successors).
4003 4004
4004 4005 Results are displayed as follows::
4005 4006
4006 4007 <rev1>
4007 4008 <successors-1A>
4008 4009 <rev2>
4009 4010 <successors-2A>
4010 4011 <successors-2B1> <successors-2B2> <successors-2B3>
4011 4012
4012 4013 Here rev2 has two possible (i.e. divergent) successors sets. The first
4013 4014 holds one element, whereas the second holds three (i.e. the changeset has
4014 4015 been split).
4015 4016 """
4016 4017 # passed to successorssets caching computation from one call to another
4017 4018 cache = {}
4018 4019 ctx2str = bytes
4019 4020 node2str = short
4020 4021 for rev in logcmdutil.revrange(repo, revs):
4021 4022 ctx = repo[rev]
4022 4023 ui.write(b'%s\n' % ctx2str(ctx))
4023 4024 for succsset in obsutil.successorssets(
4024 4025 repo, ctx.node(), closest=opts['closest'], cache=cache
4025 4026 ):
4026 4027 if succsset:
4027 4028 ui.write(b' ')
4028 4029 ui.write(node2str(succsset[0]))
4029 4030 for node in succsset[1:]:
4030 4031 ui.write(b' ')
4031 4032 ui.write(node2str(node))
4032 4033 ui.write(b'\n')
4033 4034
4034 4035
4035 4036 @command(b'debugtagscache', [])
4036 4037 def debugtagscache(ui, repo):
4037 4038 """display the contents of .hg/cache/hgtagsfnodes1"""
4038 4039 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4039 4040 flog = repo.file(b'.hgtags')
4040 4041 for r in repo:
4041 4042 node = repo[r].node()
4042 4043 tagsnode = cache.getfnode(node, computemissing=False)
4043 4044 if tagsnode:
4044 4045 tagsnodedisplay = hex(tagsnode)
4045 4046 if not flog.hasnode(tagsnode):
4046 4047 tagsnodedisplay += b' (unknown node)'
4047 4048 elif tagsnode is None:
4048 4049 tagsnodedisplay = b'missing'
4049 4050 else:
4050 4051 tagsnodedisplay = b'invalid'
4051 4052
4052 4053 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4053 4054
4054 4055
4055 4056 @command(
4056 4057 b'debugtemplate',
4057 4058 [
4058 4059 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4059 4060 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4060 4061 ],
4061 4062 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4062 4063 optionalrepo=True,
4063 4064 )
4064 4065 def debugtemplate(ui, repo, tmpl, **opts):
4065 4066 """parse and apply a template
4066 4067
4067 4068 If -r/--rev is given, the template is processed as a log template and
4068 4069 applied to the given changesets. Otherwise, it is processed as a generic
4069 4070 template.
4070 4071
4071 4072 Use --verbose to print the parsed tree.
4072 4073 """
4073 4074 revs = None
4074 4075 if opts['rev']:
4075 4076 if repo is None:
4076 4077 raise error.RepoError(
4077 4078 _(b'there is no Mercurial repository here (.hg not found)')
4078 4079 )
4079 4080 revs = logcmdutil.revrange(repo, opts['rev'])
4080 4081
4081 4082 props = {}
4082 4083 for d in opts['define']:
4083 4084 try:
4084 4085 k, v = (e.strip() for e in d.split(b'=', 1))
4085 4086 if not k or k == b'ui':
4086 4087 raise ValueError
4087 4088 props[k] = v
4088 4089 except ValueError:
4089 4090 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4090 4091
4091 4092 if ui.verbose:
4092 4093 aliases = ui.configitems(b'templatealias')
4093 4094 tree = templater.parse(tmpl)
4094 4095 ui.note(templater.prettyformat(tree), b'\n')
4095 4096 newtree = templater.expandaliases(tree, aliases)
4096 4097 if newtree != tree:
4097 4098 ui.notenoi18n(
4098 4099 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4099 4100 )
4100 4101
4101 4102 if revs is None:
4102 4103 tres = formatter.templateresources(ui, repo)
4103 4104 t = formatter.maketemplater(ui, tmpl, resources=tres)
4104 4105 if ui.verbose:
4105 4106 kwds, funcs = t.symbolsuseddefault()
4106 4107 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4107 4108 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4108 4109 ui.write(t.renderdefault(props))
4109 4110 else:
4110 4111 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4111 4112 if ui.verbose:
4112 4113 kwds, funcs = displayer.t.symbolsuseddefault()
4113 4114 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4114 4115 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4115 4116 for r in revs:
4116 4117 displayer.show(repo[r], **pycompat.strkwargs(props))
4117 4118 displayer.close()
4118 4119
4119 4120
4120 4121 @command(
4121 4122 b'debuguigetpass',
4122 4123 [
4123 4124 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4124 4125 ],
4125 4126 _(b'[-p TEXT]'),
4126 4127 norepo=True,
4127 4128 )
4128 4129 def debuguigetpass(ui, prompt=b''):
4129 4130 """show prompt to type password"""
4130 4131 r = ui.getpass(prompt)
4131 4132 if r is None:
4132 4133 r = b"<default response>"
4133 4134 ui.writenoi18n(b'response: %s\n' % r)
4134 4135
4135 4136
4136 4137 @command(
4137 4138 b'debuguiprompt',
4138 4139 [
4139 4140 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4140 4141 ],
4141 4142 _(b'[-p TEXT]'),
4142 4143 norepo=True,
4143 4144 )
4144 4145 def debuguiprompt(ui, prompt=b''):
4145 4146 """show plain prompt"""
4146 4147 r = ui.prompt(prompt)
4147 4148 ui.writenoi18n(b'response: %s\n' % r)
4148 4149
4149 4150
4150 4151 @command(b'debugupdatecaches', [])
4151 4152 def debugupdatecaches(ui, repo, *pats, **opts):
4152 4153 """warm all known caches in the repository"""
4153 4154 with repo.wlock(), repo.lock():
4154 4155 repo.updatecaches(caches=repository.CACHES_ALL)
4155 4156
4156 4157
4157 4158 @command(
4158 4159 b'debugupgraderepo',
4159 4160 [
4160 4161 (
4161 4162 b'o',
4162 4163 b'optimize',
4163 4164 [],
4164 4165 _(b'extra optimization to perform'),
4165 4166 _(b'NAME'),
4166 4167 ),
4167 4168 (b'', b'run', False, _(b'performs an upgrade')),
4168 4169 (b'', b'backup', True, _(b'keep the old repository content around')),
4169 4170 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4170 4171 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4171 4172 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4172 4173 ],
4173 4174 )
4174 4175 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4175 4176 """upgrade a repository to use different features
4176 4177
4177 4178 If no arguments are specified, the repository is evaluated for upgrade
4178 4179 and a list of problems and potential optimizations is printed.
4179 4180
4180 4181 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4181 4182 can be influenced via additional arguments. More details will be provided
4182 4183 by the command output when run without ``--run``.
4183 4184
4184 4185 During the upgrade, the repository will be locked and no writes will be
4185 4186 allowed.
4186 4187
4187 4188 At the end of the upgrade, the repository may not be readable while new
4188 4189 repository data is swapped in. This window will be as long as it takes to
4189 4190 rename some directories inside the ``.hg`` directory. On most machines, this
4190 4191 should complete almost instantaneously and the chances of a consumer being
4191 4192 unable to access the repository should be low.
4192 4193
4193 4194 By default, all revlogs will be upgraded. You can restrict this using flags
4194 4195 such as `--manifest`:
4195 4196
4196 4197 * `--manifest`: only optimize the manifest
4197 4198 * `--no-manifest`: optimize all revlogs but the manifest
4198 4199 * `--changelog`: optimize the changelog only
4199 4200 * `--no-changelog --no-manifest`: optimize filelogs only
4200 4201 * `--filelogs`: optimize the filelogs only
4201 4202 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4202 4203 """
4203 4204 return upgrade.upgraderepo(
4204 4205 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4205 4206 )
4206 4207
4207 4208
4208 4209 @command(
4209 4210 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4210 4211 )
4211 4212 def debugwalk(ui, repo, *pats, **opts):
4212 4213 """show how files match on given patterns"""
4213 4214 opts = pycompat.byteskwargs(opts)
4214 4215 m = scmutil.match(repo[None], pats, opts)
4215 4216 if ui.verbose:
4216 4217 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4217 4218 items = list(repo[None].walk(m))
4218 4219 if not items:
4219 4220 return
4220 4221 f = lambda fn: fn
4221 4222 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4222 4223 f = lambda fn: util.normpath(fn)
4223 4224 fmt = b'f %%-%ds %%-%ds %%s' % (
4224 4225 max([len(abs) for abs in items]),
4225 4226 max([len(repo.pathto(abs)) for abs in items]),
4226 4227 )
4227 4228 for abs in items:
4228 4229 line = fmt % (
4229 4230 abs,
4230 4231 f(repo.pathto(abs)),
4231 4232 m.exact(abs) and b'exact' or b'',
4232 4233 )
4233 4234 ui.write(b"%s\n" % line.rstrip())
4234 4235
4235 4236
4236 4237 @command(b'debugwhyunstable', [], _(b'REV'))
4237 4238 def debugwhyunstable(ui, repo, rev):
4238 4239 """explain instabilities of a changeset"""
4239 4240 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4240 4241 dnodes = b''
4241 4242 if entry.get(b'divergentnodes'):
4242 4243 dnodes = (
4243 4244 b' '.join(
4244 4245 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4245 4246 for ctx in entry[b'divergentnodes']
4246 4247 )
4247 4248 + b' '
4248 4249 )
4249 4250 ui.write(
4250 4251 b'%s: %s%s %s\n'
4251 4252 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4252 4253 )
4253 4254
4254 4255
4255 4256 @command(
4256 4257 b'debugwireargs',
4257 4258 [
4258 4259 (b'', b'three', b'', b'three'),
4259 4260 (b'', b'four', b'', b'four'),
4260 4261 (b'', b'five', b'', b'five'),
4261 4262 ]
4262 4263 + cmdutil.remoteopts,
4263 4264 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4264 4265 norepo=True,
4265 4266 )
4266 4267 def debugwireargs(ui, repopath, *vals, **opts):
4267 4268 opts = pycompat.byteskwargs(opts)
4268 4269 repo = hg.peer(ui, opts, repopath)
4269 4270 try:
4270 4271 for opt in cmdutil.remoteopts:
4271 4272 del opts[opt[1]]
4272 4273 args = {}
4273 4274 for k, v in opts.items():
4274 4275 if v:
4275 4276 args[k] = v
4276 4277 args = pycompat.strkwargs(args)
4277 4278 # run twice to check that we don't mess up the stream for the next command
4278 4279 res1 = repo.debugwireargs(*vals, **args)
4279 4280 res2 = repo.debugwireargs(*vals, **args)
4280 4281 ui.write(b"%s\n" % res1)
4281 4282 if res1 != res2:
4282 4283 ui.warn(b"%s\n" % res2)
4283 4284 finally:
4284 4285 repo.close()
4285 4286
4286 4287
4287 4288 def _parsewirelangblocks(fh):
4288 4289 activeaction = None
4289 4290 blocklines = []
4290 4291 lastindent = 0
4291 4292
4292 4293 for line in fh:
4293 4294 line = line.rstrip()
4294 4295 if not line:
4295 4296 continue
4296 4297
4297 4298 if line.startswith(b'#'):
4298 4299 continue
4299 4300
4300 4301 if not line.startswith(b' '):
4301 4302 # New block. Flush previous one.
4302 4303 if activeaction:
4303 4304 yield activeaction, blocklines
4304 4305
4305 4306 activeaction = line
4306 4307 blocklines = []
4307 4308 lastindent = 0
4308 4309 continue
4309 4310
4310 4311 # Else we start with an indent.
4311 4312
4312 4313 if not activeaction:
4313 4314 raise error.Abort(_(b'indented line outside of block'))
4314 4315
4315 4316 indent = len(line) - len(line.lstrip())
4316 4317
4317 4318 # If this line is indented more than the last line, concatenate it.
4318 4319 if indent > lastindent and blocklines:
4319 4320 blocklines[-1] += line.lstrip()
4320 4321 else:
4321 4322 blocklines.append(line)
4322 4323 lastindent = indent
4323 4324
4324 4325 # Flush last block.
4325 4326 if activeaction:
4326 4327 yield activeaction, blocklines
4327 4328
4328 4329
4329 4330 @command(
4330 4331 b'debugwireproto',
4331 4332 [
4332 4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4333 4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4334 4335 (
4335 4336 b'',
4336 4337 b'noreadstderr',
4337 4338 False,
4338 4339 _(b'do not read from stderr of the remote'),
4339 4340 ),
4340 4341 (
4341 4342 b'',
4342 4343 b'nologhandshake',
4343 4344 False,
4344 4345 _(b'do not log I/O related to the peer handshake'),
4345 4346 ),
4346 4347 ]
4347 4348 + cmdutil.remoteopts,
4348 4349 _(b'[PATH]'),
4349 4350 optionalrepo=True,
4350 4351 )
4351 4352 def debugwireproto(ui, repo, path=None, **opts):
4352 4353 """send wire protocol commands to a server
4353 4354
4354 4355 This command can be used to issue wire protocol commands to remote
4355 4356 peers and to debug the raw data being exchanged.
4356 4357
4357 4358 ``--localssh`` will start an SSH server against the current repository
4358 4359 and connect to that. By default, the connection will perform a handshake
4359 4360 and establish an appropriate peer instance.
4360 4361
4361 4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4362 4363 peer instance using the specified class type. Valid values are ``raw``,
4363 4364 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4364 4365 don't support higher-level command actions.
4365 4366
4366 4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4367 4368 of the peer (for SSH connections only). Disabling automatic reading of
4368 4369 stderr is useful for making output more deterministic.
4369 4370
4370 4371 Commands are issued via a mini language which is specified via stdin.
4371 4372 The language consists of individual actions to perform. An action is
4372 4373 defined by a block. A block is defined as a line with no leading
4373 4374 space followed by 0 or more lines with leading space. Blocks are
4374 4375 effectively a high-level command with additional metadata.
4375 4376
4376 4377 Lines beginning with ``#`` are ignored.
4377 4378
4378 4379 The following sections denote available actions.
4379 4380
4380 4381 raw
4381 4382 ---
4382 4383
4383 4384 Send raw data to the server.
4384 4385
4385 4386 The block payload contains the raw data to send as one atomic send
4386 4387 operation. The data may not actually be delivered in a single system
4387 4388 call: it depends on the abilities of the transport being used.
4388 4389
4389 4390 Each line in the block is de-indented and concatenated. Then, that
4390 4391 value is evaluated as a Python b'' literal. This allows the use of
4391 4392 backslash escaping, etc.
4392 4393
4393 4394 raw+
4394 4395 ----
4395 4396
4396 4397 Behaves like ``raw`` except flushes output afterwards.
4397 4398
4398 4399 command <X>
4399 4400 -----------
4400 4401
4401 4402 Send a request to run a named command, whose name follows the ``command``
4402 4403 string.
4403 4404
4404 4405 Arguments to the command are defined as lines in this block. The format of
4405 4406 each line is ``<key> <value>``. e.g.::
4406 4407
4407 4408 command listkeys
4408 4409 namespace bookmarks
4409 4410
4410 4411 If the value begins with ``eval:``, it will be interpreted as a Python
4411 4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4412 4413 This allows sending complex types and encoding special byte sequences via
4413 4414 backslash escaping.
4414 4415
4415 4416 The following arguments have special meaning:
4416 4417
4417 4418 ``PUSHFILE``
4418 4419 When defined, the *push* mechanism of the peer will be used instead
4419 4420 of the static request-response mechanism and the content of the
4420 4421 file specified in the value of this argument will be sent as the
4421 4422 command payload.
4422 4423
4423 4424 This can be used to submit a local bundle file to the remote.
4424 4425
4425 4426 batchbegin
4426 4427 ----------
4427 4428
4428 4429 Instruct the peer to begin a batched send.
4429 4430
4430 4431 All ``command`` blocks are queued for execution until the next
4431 4432 ``batchsubmit`` block.
4432 4433
4433 4434 batchsubmit
4434 4435 -----------
4435 4436
4436 4437 Submit previously queued ``command`` blocks as a batch request.
4437 4438
4438 4439 This action MUST be paired with a ``batchbegin`` action.
4439 4440
4440 4441 httprequest <method> <path>
4441 4442 ---------------------------
4442 4443
4443 4444 (HTTP peer only)
4444 4445
4445 4446 Send an HTTP request to the peer.
4446 4447
4447 4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4448 4449
4449 4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4450 4451 headers to add to the request. e.g. ``Accept: foo``.
4451 4452
4452 4453 The following arguments are special:
4453 4454
4454 4455 ``BODYFILE``
4455 4456 The content of the file defined as the value to this argument will be
4456 4457 transferred verbatim as the HTTP request body.
4457 4458
4458 4459 ``frame <type> <flags> <payload>``
4459 4460 Send a unified protocol frame as part of the request body.
4460 4461
4461 4462 All frames will be collected and sent as the body to the HTTP
4462 4463 request.
4463 4464
4464 4465 close
4465 4466 -----
4466 4467
4467 4468 Close the connection to the server.
4468 4469
4469 4470 flush
4470 4471 -----
4471 4472
4472 4473 Flush data written to the server.
4473 4474
4474 4475 readavailable
4475 4476 -------------
4476 4477
4477 4478 Close the write end of the connection and read all available data from
4478 4479 the server.
4479 4480
4480 4481 If the connection to the server encompasses multiple pipes, we poll both
4481 4482 pipes and read available data.
4482 4483
4483 4484 readline
4484 4485 --------
4485 4486
4486 4487 Read a line of output from the server. If there are multiple output
4487 4488 pipes, reads only the main pipe.
4488 4489
4489 4490 ereadline
4490 4491 ---------
4491 4492
4492 4493 Like ``readline``, but read from the stderr pipe, if available.
4493 4494
4494 4495 read <X>
4495 4496 --------
4496 4497
4497 4498 ``read()`` N bytes from the server's main output pipe.
4498 4499
4499 4500 eread <X>
4500 4501 ---------
4501 4502
4502 4503 ``read()`` N bytes from the server's stderr pipe, if available.
4503 4504
4504 4505 Specifying Unified Frame-Based Protocol Frames
4505 4506 ----------------------------------------------
4506 4507
4507 4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4508 4509 syntax.
4509 4510
4510 4511 A frame is composed as a type, flags, and payload. These can be parsed
4511 4512 from a string of the form:
4512 4513
4513 4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4514 4515
4515 4516 ``request-id`` and ``stream-id`` are integers defining the request and
4516 4517 stream identifiers.
4517 4518
4518 4519 ``type`` can be an integer value for the frame type or the string name
4519 4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4520 4521 ``command-name``.
4521 4522
4522 4523 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4523 4524 components. Each component (and there can be just one) can be an integer
4524 4525 or a flag name for stream flags or frame flags, respectively. Values are
4525 4526 resolved to integers and then bitwise OR'd together.
4526 4527
4527 4528 ``payload`` represents the raw frame payload. If it begins with
4528 4529 ``cbor:``, the following string is evaluated as Python code and the
4529 4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4530 4531 as a Python byte string literal.
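For example, a hypothetical frame specification following the grammar
above (the command name is purely illustrative)::

frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}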
4531 4532 """
4532 4533 opts = pycompat.byteskwargs(opts)
4533 4534
4534 4535 if opts[b'localssh'] and not repo:
4535 4536 raise error.Abort(_(b'--localssh requires a repository'))
4536 4537
4537 4538 if opts[b'peer'] and opts[b'peer'] not in (
4538 4539 b'raw',
4539 4540 b'ssh1',
4540 4541 ):
4541 4542 raise error.Abort(
4542 4543 _(b'invalid value for --peer'),
4543 4544 hint=_(b'valid values are "raw" and "ssh1"'),
4544 4545 )
4545 4546
4546 4547 if path and opts[b'localssh']:
4547 4548 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4548 4549
4549 4550 if ui.interactive():
4550 4551 ui.write(_(b'(waiting for commands on stdin)\n'))
4551 4552
4552 4553 blocks = list(_parsewirelangblocks(ui.fin))
4553 4554
4554 4555 proc = None
4555 4556 stdin = None
4556 4557 stdout = None
4557 4558 stderr = None
4558 4559 opener = None
4559 4560
4560 4561 if opts[b'localssh']:
4561 4562 # We start the SSH server in its own process so there is process
4562 4563 # separation. This prevents a whole class of potential bugs around
4563 4564 # shared state from interfering with server operation.
4564 4565 args = procutil.hgcmd() + [
4565 4566 b'-R',
4566 4567 repo.root,
4567 4568 b'debugserve',
4568 4569 b'--sshstdio',
4569 4570 ]
4570 4571 proc = subprocess.Popen(
4571 4572 pycompat.rapply(procutil.tonativestr, args),
4572 4573 stdin=subprocess.PIPE,
4573 4574 stdout=subprocess.PIPE,
4574 4575 stderr=subprocess.PIPE,
4575 4576 bufsize=0,
4576 4577 )
4577 4578
4578 4579 stdin = proc.stdin
4579 4580 stdout = proc.stdout
4580 4581 stderr = proc.stderr
4581 4582
4582 4583 # We turn the pipes into observers so we can log I/O.
4583 4584 if ui.verbose or opts[b'peer'] == b'raw':
4584 4585 stdin = util.makeloggingfileobject(
4585 4586 ui, proc.stdin, b'i', logdata=True
4586 4587 )
4587 4588 stdout = util.makeloggingfileobject(
4588 4589 ui, proc.stdout, b'o', logdata=True
4589 4590 )
4590 4591 stderr = util.makeloggingfileobject(
4591 4592 ui, proc.stderr, b'e', logdata=True
4592 4593 )
4593 4594
4594 4595 # --localssh also implies the peer connection settings.
4595 4596
4596 4597 url = b'ssh://localserver'
4597 4598 autoreadstderr = not opts[b'noreadstderr']
4598 4599
4599 4600 if opts[b'peer'] == b'ssh1':
4600 4601 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4601 4602 peer = sshpeer.sshv1peer(
4602 4603 ui,
4603 4604 url,
4604 4605 proc,
4605 4606 stdin,
4606 4607 stdout,
4607 4608 stderr,
4608 4609 None,
4609 4610 autoreadstderr=autoreadstderr,
4610 4611 )
4611 4612 elif opts[b'peer'] == b'raw':
4612 4613 ui.write(_(b'using raw connection to peer\n'))
4613 4614 peer = None
4614 4615 else:
4615 4616 ui.write(_(b'creating ssh peer from handshake results\n'))
4616 4617 peer = sshpeer.makepeer(
4617 4618 ui,
4618 4619 url,
4619 4620 proc,
4620 4621 stdin,
4621 4622 stdout,
4622 4623 stderr,
4623 4624 autoreadstderr=autoreadstderr,
4624 4625 )
4625 4626
4626 4627 elif path:
4627 4628 # We bypass hg.peer() so we can proxy the sockets.
4628 4629 # TODO consider not doing this because we skip
4629 4630 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4630 4631 u = urlutil.url(path)
4631 4632 if u.scheme != b'http':
4632 4633 raise error.Abort(_(b'only http:// paths are currently supported'))
4633 4634
4634 4635 url, authinfo = u.authinfo()
4635 4636 openerargs = {
4636 4637 'useragent': b'Mercurial debugwireproto',
4637 4638 }
4638 4639
4639 4640 # Turn pipes/sockets into observers so we can log I/O.
4640 4641 if ui.verbose:
4641 4642 openerargs.update(
4642 4643 {
4643 4644 'loggingfh': ui,
4644 4645 'loggingname': b's',
4645 4646 'loggingopts': {
4646 4647 'logdata': True,
4647 4648 'logdataapis': False,
4648 4649 },
4649 4650 }
4650 4651 )
4651 4652
4652 4653 if ui.debugflag:
4653 4654 openerargs['loggingopts']['logdataapis'] = True
4654 4655
4655 4656 # Don't send default headers when in raw mode. This allows us to
4656 4657 # bypass most of the behavior of our URL handling code so we can
4657 4658 # have near complete control over what's sent on the wire.
4658 4659 if opts[b'peer'] == b'raw':
4659 4660 openerargs['sendaccept'] = False
4660 4661
4661 4662 opener = urlmod.opener(ui, authinfo, **openerargs)
4662 4663
4663 4664 if opts[b'peer'] == b'raw':
4664 4665 ui.write(_(b'using raw connection to peer\n'))
4665 4666 peer = None
4666 4667 elif opts[b'peer']:
4667 4668 raise error.Abort(
4668 4669 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4669 4670 )
4670 4671 else:
4671 4672 peer = httppeer.makepeer(ui, path, opener=opener)
4672 4673
4673 4674 # We /could/ populate stdin/stdout with sock.makefile()...
4674 4675 else:
4675 4676 raise error.Abort(_(b'unsupported connection configuration'))
4676 4677
4677 4678 batchedcommands = None
4678 4679
4679 4680 # Now perform actions based on the parsed wire language instructions.
4680 4681 for action, lines in blocks:
4681 4682 if action in (b'raw', b'raw+'):
4682 4683 if not stdin:
4683 4684 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4684 4685
4685 4686 # Concatenate the data together.
4686 4687 data = b''.join(l.lstrip() for l in lines)
4687 4688 data = stringutil.unescapestr(data)
4688 4689 stdin.write(data)
4689 4690
4690 4691 if action == b'raw+':
4691 4692 stdin.flush()
4692 4693 elif action == b'flush':
4693 4694 if not stdin:
4694 4695 raise error.Abort(_(b'cannot call flush on this peer'))
4695 4696 stdin.flush()
4696 4697 elif action.startswith(b'command'):
4697 4698 if not peer:
4698 4699 raise error.Abort(
4699 4700 _(
4700 4701 b'cannot send commands unless peer instance '
4701 4702 b'is available'
4702 4703 )
4703 4704 )
4704 4705
4705 4706 command = action.split(b' ', 1)[1]
4706 4707
4707 4708 args = {}
4708 4709 for line in lines:
4709 4710 # We need to allow empty values.
4710 4711 fields = line.lstrip().split(b' ', 1)
4711 4712 if len(fields) == 1:
4712 4713 key = fields[0]
4713 4714 value = b''
4714 4715 else:
4715 4716 key, value = fields
4716 4717
4717 4718 if value.startswith(b'eval:'):
4718 4719 value = stringutil.evalpythonliteral(value[5:])
4719 4720 else:
4720 4721 value = stringutil.unescapestr(value)
4721 4722
4722 4723 args[key] = value
4723 4724
4724 4725 if batchedcommands is not None:
4725 4726 batchedcommands.append((command, args))
4726 4727 continue
4727 4728
4728 4729 ui.status(_(b'sending %s command\n') % command)
4729 4730
4730 4731 if b'PUSHFILE' in args:
4731 4732 with open(args[b'PUSHFILE'], 'rb') as fh:
4732 4733 del args[b'PUSHFILE']
4733 4734 res, output = peer._callpush(
4734 4735 command, fh, **pycompat.strkwargs(args)
4735 4736 )
4736 4737 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4737 4738 ui.status(
4738 4739 _(b'remote output: %s\n') % stringutil.escapestr(output)
4739 4740 )
4740 4741 else:
4741 4742 with peer.commandexecutor() as e:
4742 4743 res = e.callcommand(command, args).result()
4743 4744
4744 4745 ui.status(
4745 4746 _(b'response: %s\n')
4746 4747 % stringutil.pprint(res, bprefix=True, indent=2)
4747 4748 )
4748 4749
4749 4750 elif action == b'batchbegin':
4750 4751 if batchedcommands is not None:
4751 4752 raise error.Abort(_(b'nested batchbegin not allowed'))
4752 4753
4753 4754 batchedcommands = []
4754 4755 elif action == b'batchsubmit':
4755 4756 # There is a batching API we could go through. But it would be
4756 4757 # difficult to normalize requests into function calls. It is easier
4757 4758 # to bypass this layer and normalize to commands + args.
4758 4759 ui.status(
4759 4760 _(b'sending batch with %d sub-commands\n')
4760 4761 % len(batchedcommands)
4761 4762 )
4762 4763 assert peer is not None
4763 4764 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4764 4765 ui.status(
4765 4766 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4766 4767 )
4767 4768
4768 4769 batchedcommands = None
4769 4770
4770 4771 elif action.startswith(b'httprequest '):
4771 4772 if not opener:
4772 4773 raise error.Abort(
4773 4774 _(b'cannot use httprequest without an HTTP peer')
4774 4775 )
4775 4776
4776 4777 request = action.split(b' ', 2)
4777 4778 if len(request) != 3:
4778 4779 raise error.Abort(
4779 4780 _(
4780 4781 b'invalid httprequest: expected format is '
4781 4782 b'"httprequest <method> <path>'
4782 4783 )
4783 4784 )
4784 4785
4785 4786 method, httppath = request[1:]
4786 4787 headers = {}
4787 4788 body = None
4788 4789 frames = []
4789 4790 for line in lines:
4790 4791 line = line.lstrip()
4791 4792 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4792 4793 if m:
4793 4794 # Headers need to use native strings.
4794 4795 key = pycompat.strurl(m.group(1))
4795 4796 value = pycompat.strurl(m.group(2))
4796 4797 headers[key] = value
4797 4798 continue
4798 4799
4799 4800 if line.startswith(b'BODYFILE '):
4800 4801 with open(line.split(b' ', 1)[1], b'rb') as fh:
4801 4802 body = fh.read()
4802 4803 elif line.startswith(b'frame '):
4803 4804 frame = wireprotoframing.makeframefromhumanstring(
4804 4805 line[len(b'frame ') :]
4805 4806 )
4806 4807
4807 4808 frames.append(frame)
4808 4809 else:
4809 4810 raise error.Abort(
4810 4811 _(b'unknown argument to httprequest: %s') % line
4811 4812 )
4812 4813
4813 4814 url = path + httppath
4814 4815
4815 4816 if frames:
4816 4817 body = b''.join(bytes(f) for f in frames)
4817 4818
4818 4819 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4819 4820
4820 4821 # urllib.Request insists on using has_data() as a proxy for
4821 4822 # determining the request method. Override that to use our
4822 4823 # explicitly requested method.
4823 4824 req.get_method = lambda: pycompat.sysstr(method)
4824 4825
4825 4826 try:
4826 4827 res = opener.open(req)
4827 4828 body = res.read()
4828 4829 except util.urlerr.urlerror as e:
4829 4830 # read() method must be called, but only exists in Python 2
4830 4831 getattr(e, 'read', lambda: None)()
4831 4832 continue
4832 4833
4833 4834 ct = res.headers.get('Content-Type')
4834 4835 if ct == 'application/mercurial-cbor':
4835 4836 ui.write(
4836 4837 _(b'cbor> %s\n')
4837 4838 % stringutil.pprint(
4838 4839 cborutil.decodeall(body), bprefix=True, indent=2
4839 4840 )
4840 4841 )
4841 4842
4842 4843 elif action == b'close':
4843 4844 assert peer is not None
4844 4845 peer.close()
4845 4846 elif action == b'readavailable':
4846 4847 if not stdout or not stderr:
4847 4848 raise error.Abort(
4848 4849 _(b'readavailable not available on this peer')
4849 4850 )
4850 4851
4851 4852 stdin.close()
4852 4853 stdout.read()
4853 4854 stderr.read()
4854 4855
4855 4856 elif action == b'readline':
4856 4857 if not stdout:
4857 4858 raise error.Abort(_(b'readline not available on this peer'))
4858 4859 stdout.readline()
4859 4860 elif action == b'ereadline':
4860 4861 if not stderr:
4861 4862 raise error.Abort(_(b'ereadline not available on this peer'))
4862 4863 stderr.readline()
4863 4864 elif action.startswith(b'read '):
4864 4865 count = int(action.split(b' ', 1)[1])
4865 4866 if not stdout:
4866 4867 raise error.Abort(_(b'read not available on this peer'))
4867 4868 stdout.read(count)
4868 4869 elif action.startswith(b'eread '):
4869 4870 count = int(action.split(b' ', 1)[1])
4870 4871 if not stderr:
4871 4872 raise error.Abort(_(b'eread not available on this peer'))
4872 4873 stderr.read(count)
4873 4874 else:
4874 4875 raise error.Abort(_(b'unknown action: %s') % action)
4875 4876
4876 4877 if batchedcommands is not None:
4877 4878 raise error.Abort(_(b'unclosed "batchbegin" request'))
4878 4879
4879 4880 if peer:
4880 4881 peer.close()
4881 4882
4882 4883 if proc:
4883 4884 proc.kill()
@@ -1,519 +1,526 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 The algorithm works in the following way. You have two repositories: local
10 10 and remote. They both contain a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of nodes, *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issues with the original protocol was latency: it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case, you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which, given a set of changelists, tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follows:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple of optimizations. The first is that instead of starting
34 34 with a random sample of missing, you start by sending all heads; in the case
35 35 where the local repo is a subset, you have computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with them (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43
44 44 import collections
45 45 import random
46 46
47 47 from .i18n import _
48 48 from .node import nullrev
49 49 from . import (
50 50 error,
51 51 policy,
52 52 util,
53 53 )
54 54
55 55
56 56 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
57 57 """update an existing sample to match the expected size
58 58
59 59 The sample is updated with revs exponentially distant from each head of the
60 60 <revs> set. (H~1, H~2, H~4, H~8, etc).
61 61
62 62 If a target size is specified, the sampling will stop once this size is
63 63 reached. Otherwise sampling will happen until roots of the <revs> set are
64 64 reached.
65 65
66 66 :revs: set of revs we want to discover (if None, assume the whole dag)
67 67 :heads: set of DAG head revs
68 68 :sample: a sample to update
69 69 :parentfn: a callable to resolve parents for a revision
70 70 :quicksamplesize: optional target size of the sample"""
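# Walking breadth-first from the heads, a revision is added to the sample
# whenever its distance reaches the current factor (1, 2, 4, 8, ...), which
# produces the exponential spacing described in the docstring.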
71 71 dist = {}
72 72 visit = collections.deque(heads)
73 73 seen = set()
74 74 factor = 1
75 75 while visit:
76 76 curr = visit.popleft()
77 77 if curr in seen:
78 78 continue
79 79 d = dist.setdefault(curr, 1)
80 80 if d > factor:
81 81 factor *= 2
82 82 if d == factor:
83 83 sample.add(curr)
84 84 if quicksamplesize and (len(sample) >= quicksamplesize):
85 85 return
86 86 seen.add(curr)
87 87
88 88 for p in parentfn(curr):
89 89 if p != nullrev and (not revs or p in revs):
90 90 dist.setdefault(p, d + 1)
91 91 visit.append(p)
92 92
93 93
94 94 def _limitsample(sample, desiredlen, randomize=True):
95 95 """return a random subset of sample of at most desiredlen item.
96 96
97 97 If randomize is False, though, a deterministic subset is returned.
98 98 This is meant for integration tests.
99 99 """
100 100 if len(sample) <= desiredlen:
101 101 return sample
102 102 if randomize:
103 103 return set(random.sample(sample, desiredlen))
104 104 sample = list(sample)
105 105 sample.sort()
106 106 return set(sample[:desiredlen])
107 107
108 108
109 109 class partialdiscovery:
110 110 """an object representing ongoing discovery
111 111
112 112 Fed with data from the remote repository, this object keeps track of the
113 113 current set of changesets in various states:
114 114
115 115 - common: revs also known remotely
116 116 - undecided: revs we don't have information on yet
117 117 - missing: revs missing remotely
118 118 (all tracked revisions are known locally)
119 119 """
120 120
121 121 def __init__(self, repo, targetheads, respectsize, randomize=True):
122 122 self._repo = repo
123 123 self._targetheads = targetheads
124 124 self._common = repo.changelog.incrementalmissingrevs()
125 125 self._undecided = None
126 126 self.missing = set()
127 127 self._childrenmap = None
128 128 self._respectsize = respectsize
129 129 self.randomize = randomize
130 130
131 131 def addcommons(self, commons):
132 132 """register nodes known as common"""
133 133 self._common.addbases(commons)
134 134 if self._undecided is not None:
135 135 self._common.removeancestorsfrom(self._undecided)
136 136
137 137 def addmissings(self, missings):
138 138 """register some nodes as missing"""
139 139 newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
140 140 if newmissing:
141 141 self.missing.update(newmissing)
142 142 self.undecided.difference_update(newmissing)
143 143
144 144 def addinfo(self, sample):
145 145 """consume an iterable of (rev, known) tuples"""
146 146 common = set()
147 147 missing = set()
148 148 for rev, known in sample:
149 149 if known:
150 150 common.add(rev)
151 151 else:
152 152 missing.add(rev)
153 153 if common:
154 154 self.addcommons(common)
155 155 if missing:
156 156 self.addmissings(missing)
157 157
158 158 def hasinfo(self):
159 159 """return True is we have any clue about the remote state"""
160 160 return self._common.hasbases()
161 161
162 162 def iscomplete(self):
163 163 """True if all the necessary data have been gathered"""
164 164 return self._undecided is not None and not self._undecided
165 165
166 166 @property
167 167 def undecided(self):
168 168 if self._undecided is not None:
169 169 return self._undecided
170 170 self._undecided = set(self._common.missingancestors(self._targetheads))
171 171 return self._undecided
172 172
173 173 def stats(self):
174 174 return {
175 175 'undecided': len(self.undecided),
176 176 }
177 177
178 178 def commonheads(self):
179 179 """the heads of the known common set"""
180 180 # heads(common) == heads(common.bases) since common represents
181 181 # common.bases and all its ancestors
182 182 return self._common.basesheads()
183 183
184 184 def _parentsgetter(self):
185 185 getrev = self._repo.changelog.index.__getitem__
186 186
187 187 def getparents(r):
188 188 return getrev(r)[5:7]
189 189
190 190 return getparents
191 191
192 192 def _childrengetter(self):
193 193
194 194 if self._childrenmap is not None:
195 195 # During discovery, the `undecided` set keeps shrinking.
196 196 # Therefore, the map computed for an iteration N will be
197 197 # valid for iteration N+1. Instead of computing the same
198 198 # data over and over, we cache it the first time.
199 199 return self._childrenmap.__getitem__
200 200
201 201 # _updatesample() essentially iterates over revisions to look
202 202 # up their children. This lookup is expensive and doing it in a loop is
203 203 # quadratic. We precompute the children for all relevant revisions and
204 204 # make the lookup in _updatesample() a simple dict lookup.
205 205 self._childrenmap = children = {}
206 206
207 207 parentrevs = self._parentsgetter()
208 208 revs = self.undecided
209 209
210 210 for rev in sorted(revs):
211 211 # Always ensure revision has an entry so we don't need to worry
212 212 # about missing keys.
213 213 children[rev] = []
214 214 for prev in parentrevs(rev):
215 215 if prev == nullrev:
216 216 continue
217 217 c = children.get(prev)
218 218 if c is not None:
219 219 c.append(rev)
220 220 return children.__getitem__
221 221
222 222 def takequicksample(self, headrevs, size):
223 223 """takes a quick sample of size <size>
224 224
225 225 It is meant for initial sampling and focuses on querying heads and close
226 226 ancestors of heads.
227 227
228 228 :headrevs: set of head revisions in local DAG to consider
229 229 :size: the maximum size of the sample"""
230 230 revs = self.undecided
231 231 if len(revs) <= size:
232 232 return list(revs)
233 233 sample = set(self._repo.revs(b'heads(%ld)', revs))
234 234
235 235 if len(sample) >= size:
236 236 return _limitsample(sample, size, randomize=self.randomize)
237 237
238 238 _updatesample(
239 239 None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
240 240 )
241 241 return sample
242 242
243 243 def takefullsample(self, headrevs, size):
244 244 revs = self.undecided
245 245 if len(revs) <= size:
246 246 return list(revs)
247 247 repo = self._repo
248 248 sample = set(repo.revs(b'heads(%ld)', revs))
249 249 parentrevs = self._parentsgetter()
250 250
251 251 # update from heads
252 252 revsheads = sample.copy()
253 253 _updatesample(revs, revsheads, sample, parentrevs)
254 254
255 255 # update from roots
256 256 revsroots = set(repo.revs(b'roots(%ld)', revs))
257 257 childrenrevs = self._childrengetter()
258 258 _updatesample(revs, revsroots, sample, childrenrevs)
259 259 assert sample
260 260
261 261 if not self._respectsize:
262 262 size = max(size, min(len(revsroots), len(revsheads)))
263 263
264 264 sample = _limitsample(sample, size, randomize=self.randomize)
265 265 if len(sample) < size:
266 266 more = size - len(sample)
267 267 takefrom = list(revs - sample)
268 268 if self.randomize:
269 269 sample.update(random.sample(takefrom, more))
270 270 else:
271 271 takefrom.sort()
272 272 sample.update(takefrom[:more])
273 273 return sample
274 274
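
The class is consumed by `findcommonheads` below; stripped of configuration, auditing, and progress handling, the driving loop looks roughly like this sketch (`repo`, `ownheads`, and a peer exposing a `known()`-style call are assumed to be set up already; the real code goes through `commandexecutor`):

    disco = partialdiscovery(repo, ownheads, respectsize=False)
    while not disco.iscomplete():
        if disco.hasinfo():
            sample = disco.takefullsample(ownheads, 200)
        else:
            sample = disco.takequicksample(ownheads, 100)
        sample = list(sample)  # fix an ordering before externalizing
        yesno = remote.known([repo.changelog.node(r) for r in sample])
        disco.addinfo(zip(sample, yesno))
    commonheads = disco.commonheads()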
275 275
276 276 pure_partialdiscovery = partialdiscovery
277 277
278 278 partialdiscovery = policy.importrust(
279 279 'discovery', member='PartialDiscovery', default=partialdiscovery
280 280 )
281 281
282 282
283 283 def findcommonheads(
284 284 ui,
285 285 local,
286 286 remote,
287 287 abortwhenunrelated=True,
288 288 ancestorsof=None,
289 289 audit=None,
290 290 ):
291 291 """Return a tuple (common, anyincoming, remoteheads) used to identify
292 292 missing nodes from or in remote.
293 293
294 294 The audit argument is an optional dictionary that a caller can pass. It
295 295 will be updated with extra data about the discovery; this is useful for
296 296 debugging.
297 297 """
298 298
299 299 samplegrowth = float(ui.config(b'devel', b'discovery.grow-sample.rate'))
300 300
301 if audit is not None:
302 audit[b'total-queries'] = 0
303
301 304 start = util.timer()
302 305
303 306 roundtrips = 0
304 307 cl = local.changelog
305 308 clnode = cl.node
306 309 clrev = cl.rev
307 310
308 311 if ancestorsof is not None:
309 312 ownheads = [clrev(n) for n in ancestorsof]
310 313 else:
311 314 ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
312 315
313 316 initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads')
314 317 initialsamplesize = ui.configint(b'devel', b'discovery.sample-size.initial')
315 318 fullsamplesize = ui.configint(b'devel', b'discovery.sample-size')
316 319 # We also ask remote about all the local heads. That set can be arbitrarily
317 320 # large, so we used to limit its size to `initialsamplesize`. We no longer
318 321 # do so, as it proved counterproductive. The skipped heads could lead to a
319 322 # large "undecided" set, slower to be clarified than if we asked the
320 323 # question for all heads right away.
321 324 #
322 325 # We are already fetching all server heads using the `heads` command, so
323 326 # sending an equivalent number of heads the other way should not have a
324 327 # significant impact. In addition, it is very likely that we are going to
325 328 # have to issue "known" requests for an equivalent number of revisions in
326 329 # order to decide if these heads are common or missing.
327 330 #
328 331 # Find a detailed analysis below.
329 332 #
330 333 # Case A: local and server both have few heads
331 334 #
332 335 # Ownheads is below initialsamplesize, limit would not have any effect.
333 336 #
334 337 # Case B: local has few heads and server has many
335 338 #
336 339 # Ownheads is below initialsamplesize, limit would not have any effect.
337 340 #
338 341 # Case C: local and server both have many heads
339 342 #
340 343 # We now transfer some more data, but not significantly more than is
341 344 # already transferred to carry the server heads.
342 345 #
343 346 # Case D: local has many heads, server has few
344 347 #
345 348 # D.1 local heads are mostly known remotely
346 349 #
347 350 # All the known heads will have been part of a `known` request at some
348 351 # point for the discovery to finish. Sending them all earlier is
349 352 # actually helping.
350 353 #
351 354 # (This case is fairly unlikely: it requires the numerous heads to all
352 355 # be merged server side into only a few heads.)
353 356 #
354 357 # D.2 local heads are mostly missing remotely
355 358 #
356 359 # To determine that the heads are missing, we'll have to issue `known`
357 360 # requests for them or one of their ancestors. The number of `known`
358 361 # requests will likely be in the same order of magnitude as the number
359 362 # of local heads.
360 363 #
361 364 # The only case where we can be more efficient using `known` requests on
362 365 # ancestors is the case where all the "missing" local heads are based on a
363 366 # few changesets, also "missing". This means we would have a "complex"
364 367 # graph (with many heads) attached to, but largely independent of, the
365 368 # "simple" graph on the server. This is a fairly unusual case and has
366 369 # not been met in the wild so far.
367 370 if initial_head_exchange:
368 371 if remote.limitedarguments:
369 372 sample = _limitsample(ownheads, initialsamplesize)
370 373 # indices between sample and externalized version must match
371 374 sample = list(sample)
372 375 else:
373 376 sample = ownheads
374 377
375 378 ui.debug(b"query 1; heads\n")
376 379 roundtrips += 1
377 380 with remote.commandexecutor() as e:
378 381 fheads = e.callcommand(b'heads', {})
382 if audit is not None:
383 audit[b'total-queries'] += len(sample)
379 384 fknown = e.callcommand(
380 385 b'known',
381 386 {
382 387 b'nodes': [clnode(r) for r in sample],
383 388 },
384 389 )
385 390
386 391 srvheadhashes, yesno = fheads.result(), fknown.result()
387 392
388 393 if audit is not None:
389 394 audit[b'total-roundtrips'] = 1
390 395
391 396 if cl.tiprev() == nullrev:
392 397 if srvheadhashes != [cl.nullid]:
393 398 return [cl.nullid], True, srvheadhashes
394 399 return [cl.nullid], False, []
395 400 else:
396 401 # we still need the remote head for the function return
397 402 with remote.commandexecutor() as e:
398 403 fheads = e.callcommand(b'heads', {})
399 404 srvheadhashes = fheads.result()
400 405
401 406 # start actual discovery (we note this before the next "if" for
402 407 # compatibility reasons)
403 408 ui.status(_(b"searching for changes\n"))
404 409
405 410 knownsrvheads = [] # revnos of remote heads that are known locally
406 411 for node in srvheadhashes:
407 412 if node == cl.nullid:
408 413 continue
409 414
410 415 try:
411 416 knownsrvheads.append(clrev(node))
412 417 # Catches unknown and filtered nodes.
413 418 except error.LookupError:
414 419 continue
415 420
416 421 if initial_head_exchange:
417 422 # early exit if we know all the specified remote heads already
418 423 if len(knownsrvheads) == len(srvheadhashes):
419 424 ui.debug(b"all remote heads known locally\n")
420 425 return srvheadhashes, False, srvheadhashes
421 426
422 427 if len(sample) == len(ownheads) and all(yesno):
423 428 ui.note(_(b"all local changesets known remotely\n"))
424 429 ownheadhashes = [clnode(r) for r in ownheads]
425 430 return ownheadhashes, True, srvheadhashes
426 431
427 432 # full blown discovery
428 433
429 434 # if the server has a limit to its arguments size, we can't grow the sample.
430 435 configbool = local.ui.configbool
431 436 grow_sample = configbool(b'devel', b'discovery.grow-sample')
432 437 grow_sample = grow_sample and not remote.limitedarguments
433 438
434 439 dynamic_sample = configbool(b'devel', b'discovery.grow-sample.dynamic')
435 440 hard_limit_sample = not (dynamic_sample or remote.limitedarguments)
436 441
437 442 randomize = ui.configbool(b'devel', b'discovery.randomize')
438 443 if cl.index.rust_ext_compat:
439 444 pd = partialdiscovery
440 445 else:
441 446 pd = pure_partialdiscovery
442 447 disco = pd(local, ownheads, hard_limit_sample, randomize=randomize)
443 448 if initial_head_exchange:
444 449 # treat remote heads (and maybe own heads) as a first implicit sample
445 450 # response
446 451 disco.addcommons(knownsrvheads)
447 452 disco.addinfo(zip(sample, yesno))
448 453
449 454 full = not initial_head_exchange
450 455 progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
451 456 while not disco.iscomplete():
452 457
453 458 if full or disco.hasinfo():
454 459 if full:
455 460 ui.note(_(b"sampling from both directions\n"))
456 461 else:
457 462 ui.debug(b"taking initial sample\n")
458 463 samplefunc = disco.takefullsample
459 464 targetsize = fullsamplesize
460 465 if grow_sample:
461 466 fullsamplesize = int(fullsamplesize * samplegrowth)
462 467 else:
463 468 # use even cheaper initial sample
464 469 ui.debug(b"taking quick initial sample\n")
465 470 samplefunc = disco.takequicksample
466 471 targetsize = initialsamplesize
467 472 sample = samplefunc(ownheads, targetsize)
468 473
469 474 roundtrips += 1
470 475 progress.update(roundtrips)
471 476 stats = disco.stats()
472 477 ui.debug(
473 478 b"query %i; still undecided: %i, sample size is: %i\n"
474 479 % (roundtrips, stats['undecided'], len(sample))
475 480 )
476 481
477 482 # indices between sample and externalized version must match
478 483 sample = list(sample)
479 484
480 485 with remote.commandexecutor() as e:
486 if audit is not None:
487 audit[b'total-queries'] += len(sample)
481 488 yesno = e.callcommand(
482 489 b'known',
483 490 {
484 491 b'nodes': [clnode(r) for r in sample],
485 492 },
486 493 ).result()
487 494
488 495 full = True
489 496
490 497 disco.addinfo(zip(sample, yesno))
491 498
492 499 result = disco.commonheads()
493 500 elapsed = util.timer() - start
494 501 progress.complete()
495 502 ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
496 503 msg = (
497 504 b'found %d common and %d unknown server heads,'
498 505 b' %d roundtrips in %.4fs\n'
499 506 )
500 507 missing = set(result) - set(knownsrvheads)
501 508 ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
502 509
503 510 if audit is not None:
504 511 audit[b'total-roundtrips'] = roundtrips
505 512
506 513 if not result and srvheadhashes != [cl.nullid]:
507 514 if abortwhenunrelated:
508 515 raise error.Abort(_(b"repository is unrelated"))
509 516 else:
510 517 ui.warn(_(b"warning: repository is unrelated\n"))
511 518 return (
512 519 {cl.nullid},
513 520 True,
514 521 srvheadhashes,
515 522 )
516 523
517 524 anyincoming = srvheadhashes != [cl.nullid]
518 525 result = {clnode(r) for r in result}
519 526 return result, anyincoming, srvheadhashes
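
The new counter can be read back by passing a plain dict to the function above; a minimal caller sketch, assuming `ui`, `local`, and `remote` are already available:

    audit = {}
    common, anyincoming, srvheads = findcommonheads(ui, local, remote, audit=audit)
    # the dict now carries both counters surfaced by debugdiscovery
    ui.write(b"round-trips: %d\n" % audit[b'total-roundtrips'])
    ui.write(b"queries: %d\n" % audit[b'total-queries'])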
@@ -1,186 +1,194 b''
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import collections
10 10
11 11 from .i18n import _
12 12 from .node import short
13 13 from . import (
14 14 error,
15 15 pycompat,
16 16 )
17 17
18 18
19 19 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
20 20 """Return a tuple (common, fetch, heads) used to identify the common
21 21 subset of nodes between repo and remote.
22 22
23 23 "common" is a list of (at least) the heads of the common subset.
24 24 "fetch" is a list of roots of the nodes that would be incoming, to be
25 25 supplied to changegroupsubset.
26 26 "heads" is either the supplied heads, or else the remote's heads.
27 27 """
28 28
29 29 knownnode = repo.changelog.hasnode
30 30 search = []
31 31 fetch = set()
32 32 seen = set()
33 33 seenbranch = set()
34 34 base = set()
35 35
36 36 if not heads:
37 37 with remote.commandexecutor() as e:
38 38 heads = e.callcommand(b'heads', {}).result()
39 39
40 40 if audit is not None:
41 41 audit[b'total-roundtrips'] = 1
42 audit[b'total-queries'] = 0
42 43
43 44 if repo.changelog.tip() == repo.nullid:
44 45 base.add(repo.nullid)
45 46 if heads != [repo.nullid]:
46 47 return [repo.nullid], [repo.nullid], list(heads)
47 48 return [repo.nullid], [], heads
48 49
49 50 # assume we're closer to the tip than the root
50 51 # and start by examining the heads
51 52 repo.ui.status(_(b"searching for changes\n"))
52 53
53 54 unknown = []
54 55 for h in heads:
55 56 if not knownnode(h):
56 57 unknown.append(h)
57 58 else:
58 59 base.add(h)
59 60
60 61 if not unknown:
61 62 return list(base), [], list(heads)
62 63
63 64 req = set(unknown)
64 65 reqcnt = 0
65 66 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
66 67
67 68 # search through remote branches
68 69 # a 'branch' here is a linear segment of history, with four parts:
69 70 # head, root, first parent, second parent
70 71 # (a branch always has two parents (or none) by definition)
71 72 with remote.commandexecutor() as e:
73 if audit is not None:
74 audit[b'total-queries'] += len(unknown)
72 75 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
73 76
74 77 unknown = collections.deque(branches)
75 78 while unknown:
76 79 r = []
77 80 while unknown:
78 81 n = unknown.popleft()
79 82 if n[0] in seen:
80 83 continue
81 84
82 85 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
83 86 if n[0] == repo.nullid: # found the end of the branch
84 87 pass
85 88 elif n in seenbranch:
86 89 repo.ui.debug(b"branch already found\n")
87 90 continue
88 91 elif n[1] and knownnode(n[1]): # do we know the base?
89 92 repo.ui.debug(
90 93 b"found incomplete branch %s:%s\n"
91 94 % (short(n[0]), short(n[1]))
92 95 )
93 96 search.append(n[0:2]) # schedule branch range for scanning
94 97 seenbranch.add(n)
95 98 else:
96 99 if n[1] not in seen and n[1] not in fetch:
97 100 if knownnode(n[2]) and knownnode(n[3]):
98 101 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
99 102 fetch.add(n[1]) # earliest unknown
100 103 for p in n[2:4]:
101 104 if knownnode(p):
102 105 base.add(p) # latest known
103 106
104 107 for p in n[2:4]:
105 108 if p not in req and not knownnode(p):
106 109 r.append(p)
107 110 req.add(p)
108 111 seen.add(n[0])
109 112
110 113 if r:
111 114 reqcnt += 1
112 115 progress.increment()
113 116 repo.ui.debug(
114 117 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
115 118 )
116 119 for p in pycompat.xrange(0, len(r), 10):
117 120 with remote.commandexecutor() as e:
121 subset = r[p : p + 10]
122 if audit is not None:
123 audit[b'total-queries'] += len(subset)
118 124 branches = e.callcommand(
119 125 b'branches',
120 126 {
121 b'nodes': r[p : p + 10],
127 b'nodes': subset,
122 128 },
123 129 ).result()
124 130
125 131 for b in branches:
126 132 repo.ui.debug(
127 133 b"received %s:%s\n" % (short(b[0]), short(b[1]))
128 134 )
129 135 unknown.append(b)
130 136
131 137 # do binary search on the branches we found
132 138 while search:
133 139 newsearch = []
134 140 reqcnt += 1
135 141 progress.increment()
136 142
137 143 with remote.commandexecutor() as e:
144 if audit is not None:
145 audit[b'total-queries'] += len(search)
138 146 between = e.callcommand(b'between', {b'pairs': search}).result()
139 147
140 148 for n, l in zip(search, between):
141 149 l.append(n[1])
142 150 p = n[0]
143 151 f = 1
144 152 for i in l:
145 153 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
146 154 if knownnode(i):
147 155 if f <= 2:
148 156 repo.ui.debug(
149 157 b"found new branch changeset %s\n" % short(p)
150 158 )
151 159 fetch.add(p)
152 160 base.add(i)
153 161 else:
154 162 repo.ui.debug(
155 163 b"narrowed branch search to %s:%s\n"
156 164 % (short(p), short(i))
157 165 )
158 166 newsearch.append((p, i))
159 167 break
160 168 p, f = i, f * 2
161 169 search = newsearch
162 170
163 171 # sanity check our fetch list
164 172 for f in fetch:
165 173 if knownnode(f):
166 174 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
167 175
168 176 base = list(base)
169 177 if base == [repo.nullid]:
170 178 if force:
171 179 repo.ui.warn(_(b"warning: repository is unrelated\n"))
172 180 else:
173 181 raise error.Abort(_(b"repository is unrelated"))
174 182
175 183 repo.ui.debug(
176 184 b"found new changesets starting at "
177 185 + b" ".join([short(f) for f in fetch])
178 186 + b"\n"
179 187 )
180 188
181 189 progress.complete()
182 190 repo.ui.debug(b"%d total queries\n" % reqcnt)
183 191 if audit is not None:
184 192 audit[b'total-roundtrips'] = reqcnt
185 193
186 194 return base, list(fetch), heads
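
The same audit dict works with this older tree-based discovery; a minimal sketch mirroring the one above, assuming `repo` and `remote` are set up:

    audit = {}
    base, fetch, heads = findcommonincoming(repo, remote, audit=audit)
    repo.ui.write(
        b"%d round-trips, %d queries\n"
        % (audit[b'total-roundtrips'], audit[b'total-queries'])
    )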
@@ -1,1762 +1,1806 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 46 elapsed time: * seconds (glob)
47 47 round-trips: 2
48 queries: 6
48 49 heads summary:
49 50 total common heads: 2
50 51 also local heads: 2
51 52 also remote heads: 1
52 53 both: 1
53 54 local heads: 2
54 55 common: 2
55 56 missing: 0
56 57 remote heads: 3
57 58 common: 1
58 59 unknown: 2
59 60 local changesets: 7
60 61 common: 7
61 62 heads: 2
62 63 roots: 1
63 64 missing: 0
64 65 heads: 0
65 66 roots: 0
66 67 first undecided set: 3
67 68 heads: 1
68 69 roots: 1
69 70 common: 3
70 71 missing: 0
71 72 common heads: 01241442b3c2 b5714e113bc0
72 73
73 74 % -- a -> b set
74 75 comparing with b
75 76 query 1; heads
76 77 searching for changes
77 78 all local changesets known remotely
78 79 elapsed time: * seconds (glob)
79 80 round-trips: 1
81 queries: 2
80 82 heads summary:
81 83 total common heads: 2
82 84 also local heads: 2
83 85 also remote heads: 1
84 86 both: 1
85 87 local heads: 2
86 88 common: 2
87 89 missing: 0
88 90 remote heads: 3
89 91 common: 1
90 92 unknown: 2
91 93 local changesets: 7
92 94 common: 7
93 95 heads: 2
94 96 roots: 1
95 97 missing: 0
96 98 heads: 0
97 99 roots: 0
98 100 first undecided set: 3
99 101 heads: 1
100 102 roots: 1
101 103 common: 3
102 104 missing: 0
103 105 common heads: 01241442b3c2 b5714e113bc0
104 106
105 107 % -- a -> b set (tip only)
106 108 comparing with b
107 109 query 1; heads
108 110 searching for changes
109 111 all local changesets known remotely
110 112 elapsed time: * seconds (glob)
111 113 round-trips: 1
114 queries: 1
112 115 heads summary:
113 116 total common heads: 1
114 117 also local heads: 1
115 118 also remote heads: 0
116 119 both: 0
117 120 local heads: 2
118 121 common: 1
119 122 missing: 1
120 123 remote heads: 3
121 124 common: 0
122 125 unknown: 3
123 126 local changesets: 7
124 127 common: 6
125 128 heads: 1
126 129 roots: 1
127 130 missing: 1
128 131 heads: 1
129 132 roots: 1
130 133 first undecided set: 6
131 134 heads: 2
132 135 roots: 1
133 136 common: 5
134 137 missing: 1
135 138 common heads: b5714e113bc0
136 139
137 140 % -- b -> a tree
138 141 comparing with a
139 142 searching for changes
140 143 unpruned common: 01241442b3c2 b5714e113bc0
141 144 elapsed time: * seconds (glob)
142 145 round-trips: 1
146 queries: 0
143 147 heads summary:
144 148 total common heads: 2
145 149 also local heads: 1
146 150 also remote heads: 2
147 151 both: 1
148 152 local heads: 3
149 153 common: 1
150 154 missing: 2
151 155 remote heads: 2
152 156 common: 2
153 157 unknown: 0
154 158 local changesets: 15
155 159 common: 7
156 160 heads: 2
157 161 roots: 1
158 162 missing: 8
159 163 heads: 2
160 164 roots: 2
161 165 first undecided set: 8
162 166 heads: 2
163 167 roots: 2
164 168 common: 0
165 169 missing: 8
166 170 common heads: 01241442b3c2 b5714e113bc0
167 171
168 172 % -- b -> a set
169 173 comparing with a
170 174 query 1; heads
171 175 searching for changes
172 176 all remote heads known locally
173 177 elapsed time: * seconds (glob)
174 178 round-trips: 1
179 queries: 3
175 180 heads summary:
176 181 total common heads: 2
177 182 also local heads: 1
178 183 also remote heads: 2
179 184 both: 1
180 185 local heads: 3
181 186 common: 1
182 187 missing: 2
183 188 remote heads: 2
184 189 common: 2
185 190 unknown: 0
186 191 local changesets: 15
187 192 common: 7
188 193 heads: 2
189 194 roots: 1
190 195 missing: 8
191 196 heads: 2
192 197 roots: 2
193 198 first undecided set: 8
194 199 heads: 2
195 200 roots: 2
196 201 common: 0
197 202 missing: 8
198 203 common heads: 01241442b3c2 b5714e113bc0
199 204
200 205 % -- b -> a set (tip only)
201 206 comparing with a
202 207 query 1; heads
203 208 searching for changes
204 209 all remote heads known locally
205 210 elapsed time: * seconds (glob)
206 211 round-trips: 1
212 queries: 1
207 213 heads summary:
208 214 total common heads: 2
209 215 also local heads: 1
210 216 also remote heads: 2
211 217 both: 1
212 218 local heads: 3
213 219 common: 1
214 220 missing: 2
215 221 remote heads: 2
216 222 common: 2
217 223 unknown: 0
218 224 local changesets: 15
219 225 common: 7
220 226 heads: 2
221 227 roots: 1
222 228 missing: 8
223 229 heads: 2
224 230 roots: 2
225 231 first undecided set: 8
226 232 heads: 2
227 233 roots: 2
228 234 common: 0
229 235 missing: 8
230 236 common heads: 01241442b3c2 b5714e113bc0
231 237
232 238
233 239 Many new:
234 240
235 241 $ testdesc '-ra1 -ra2' '-rb' '
236 242 > +2:f +3:a1 +3:b
237 243 > <f +30 :a2'
238 244
239 245 % -- a -> b tree
240 246 comparing with b
241 247 searching for changes
242 248 unpruned common: bebd167eb94d
243 249 elapsed time: * seconds (glob)
244 250 round-trips: 2
251 queries: 3
245 252 heads summary:
246 253 total common heads: 1
247 254 also local heads: 1
248 255 also remote heads: 0
249 256 both: 0
250 257 local heads: 2
251 258 common: 1
252 259 missing: 1
253 260 remote heads: 1
254 261 common: 0
255 262 unknown: 1
256 263 local changesets: 35
257 264 common: 5
258 265 heads: 1
259 266 roots: 1
260 267 missing: 30
261 268 heads: 1
262 269 roots: 1
263 270 first undecided set: 34
264 271 heads: 2
265 272 roots: 1
266 273 common: 4
267 274 missing: 30
268 275 common heads: bebd167eb94d
269 276
270 277 % -- a -> b set
271 278 comparing with b
272 279 query 1; heads
273 280 searching for changes
274 281 taking initial sample
275 282 searching: 2 queries
276 283 query 2; still undecided: 29, sample size is: 29
277 284 2 total queries in *.????s (glob)
278 285 elapsed time: * seconds (glob)
279 286 round-trips: 2
287 queries: 31
280 288 heads summary:
281 289 total common heads: 1
282 290 also local heads: 1
283 291 also remote heads: 0
284 292 both: 0
285 293 local heads: 2
286 294 common: 1
287 295 missing: 1
288 296 remote heads: 1
289 297 common: 0
290 298 unknown: 1
291 299 local changesets: 35
292 300 common: 5
293 301 heads: 1
294 302 roots: 1
295 303 missing: 30
296 304 heads: 1
297 305 roots: 1
298 306 first undecided set: 34
299 307 heads: 2
300 308 roots: 1
301 309 common: 4
302 310 missing: 30
303 311 common heads: bebd167eb94d
304 312
305 313 % -- a -> b set (tip only)
306 314 comparing with b
307 315 query 1; heads
308 316 searching for changes
309 317 taking quick initial sample
310 318 searching: 2 queries
311 319 query 2; still undecided: 31, sample size is: 31
312 320 2 total queries in *.????s (glob)
313 321 elapsed time: * seconds (glob)
314 322 round-trips: 2
323 queries: 32
315 324 heads summary:
316 325 total common heads: 1
317 326 also local heads: 0
318 327 also remote heads: 0
319 328 both: 0
320 329 local heads: 2
321 330 common: 0
322 331 missing: 2
323 332 remote heads: 1
324 333 common: 0
325 334 unknown: 1
326 335 local changesets: 35
327 336 common: 2
328 337 heads: 1
329 338 roots: 1
330 339 missing: 33
331 340 heads: 2
332 341 roots: 2
333 342 first undecided set: 35
334 343 heads: 2
335 344 roots: 1
336 345 common: 2
337 346 missing: 33
338 347 common heads: 66f7d451a68b
339 348
340 349 % -- b -> a tree
341 350 comparing with a
342 351 searching for changes
343 352 unpruned common: 66f7d451a68b bebd167eb94d
344 353 elapsed time: * seconds (glob)
345 354 round-trips: 4
355 queries: 5
346 356 heads summary:
347 357 total common heads: 1
348 358 also local heads: 0
349 359 also remote heads: 1
350 360 both: 0
351 361 local heads: 1
352 362 common: 0
353 363 missing: 1
354 364 remote heads: 2
355 365 common: 1
356 366 unknown: 1
357 367 local changesets: 8
358 368 common: 5
359 369 heads: 1
360 370 roots: 1
361 371 missing: 3
362 372 heads: 1
363 373 roots: 1
364 374 first undecided set: 3
365 375 heads: 1
366 376 roots: 1
367 377 common: 0
368 378 missing: 3
369 379 common heads: bebd167eb94d
370 380
371 381 % -- b -> a set
372 382 comparing with a
373 383 query 1; heads
374 384 searching for changes
375 385 taking initial sample
376 386 searching: 2 queries
377 387 query 2; still undecided: 2, sample size is: 2
378 388 2 total queries in *.????s (glob)
379 389 elapsed time: * seconds (glob)
380 390 round-trips: 2
391 queries: 3
381 392 heads summary:
382 393 total common heads: 1
383 394 also local heads: 0
384 395 also remote heads: 1
385 396 both: 0
386 397 local heads: 1
387 398 common: 0
388 399 missing: 1
389 400 remote heads: 2
390 401 common: 1
391 402 unknown: 1
392 403 local changesets: 8
393 404 common: 5
394 405 heads: 1
395 406 roots: 1
396 407 missing: 3
397 408 heads: 1
398 409 roots: 1
399 410 first undecided set: 3
400 411 heads: 1
401 412 roots: 1
402 413 common: 0
403 414 missing: 3
404 415 common heads: bebd167eb94d
405 416
406 417 % -- b -> a set (tip only)
407 418 comparing with a
408 419 query 1; heads
409 420 searching for changes
410 421 taking initial sample
411 422 searching: 2 queries
412 423 query 2; still undecided: 2, sample size is: 2
413 424 2 total queries in *.????s (glob)
414 425 elapsed time: * seconds (glob)
415 426 round-trips: 2
427 queries: 3
416 428 heads summary:
417 429 total common heads: 1
418 430 also local heads: 0
419 431 also remote heads: 1
420 432 both: 0
421 433 local heads: 1
422 434 common: 0
423 435 missing: 1
424 436 remote heads: 2
425 437 common: 1
426 438 unknown: 1
427 439 local changesets: 8
428 440 common: 5
429 441 heads: 1
430 442 roots: 1
431 443 missing: 3
432 444 heads: 1
433 445 roots: 1
434 446 first undecided set: 3
435 447 heads: 1
436 448 roots: 1
437 449 common: 0
438 450 missing: 3
439 451 common heads: bebd167eb94d
440 452
441 453 Both sides many new with stub:
442 454
443 455 $ testdesc '-ra1 -ra2' '-rb' '
444 456 > +2:f +2:a1 +30 :b
445 457 > <f +30 :a2'
446 458
447 459 % -- a -> b tree
448 460 comparing with b
449 461 searching for changes
450 462 unpruned common: 2dc09a01254d
451 463 elapsed time: * seconds (glob)
452 464 round-trips: 4
465 queries: 5
453 466 heads summary:
454 467 total common heads: 1
455 468 also local heads: 1
456 469 also remote heads: 0
457 470 both: 0
458 471 local heads: 2
459 472 common: 1
460 473 missing: 1
461 474 remote heads: 1
462 475 common: 0
463 476 unknown: 1
464 477 local changesets: 34
465 478 common: 4
466 479 heads: 1
467 480 roots: 1
468 481 missing: 30
469 482 heads: 1
470 483 roots: 1
471 484 first undecided set: 33
472 485 heads: 2
473 486 roots: 1
474 487 common: 3
475 488 missing: 30
476 489 common heads: 2dc09a01254d
477 490
478 491 % -- a -> b set
479 492 comparing with b
480 493 query 1; heads
481 494 searching for changes
482 495 taking initial sample
483 496 searching: 2 queries
484 497 query 2; still undecided: 29, sample size is: 29
485 498 2 total queries in *.????s (glob)
486 499 elapsed time: * seconds (glob)
487 500 round-trips: 2
501 queries: 31
488 502 heads summary:
489 503 total common heads: 1
490 504 also local heads: 1
491 505 also remote heads: 0
492 506 both: 0
493 507 local heads: 2
494 508 common: 1
495 509 missing: 1
496 510 remote heads: 1
497 511 common: 0
498 512 unknown: 1
499 513 local changesets: 34
500 514 common: 4
501 515 heads: 1
502 516 roots: 1
503 517 missing: 30
504 518 heads: 1
505 519 roots: 1
506 520 first undecided set: 33
507 521 heads: 2
508 522 roots: 1
509 523 common: 3
510 524 missing: 30
511 525 common heads: 2dc09a01254d
512 526
513 527 % -- a -> b set (tip only)
514 528 comparing with b
515 529 query 1; heads
516 530 searching for changes
517 531 taking quick initial sample
518 532 searching: 2 queries
519 533 query 2; still undecided: 31, sample size is: 31
520 534 2 total queries in *.????s (glob)
521 535 elapsed time: * seconds (glob)
522 536 round-trips: 2
537 queries: 32
523 538 heads summary:
524 539 total common heads: 1
525 540 also local heads: 0
526 541 also remote heads: 0
527 542 both: 0
528 543 local heads: 2
529 544 common: 0
530 545 missing: 2
531 546 remote heads: 1
532 547 common: 0
533 548 unknown: 1
534 549 local changesets: 34
535 550 common: 2
536 551 heads: 1
537 552 roots: 1
538 553 missing: 32
539 554 heads: 2
540 555 roots: 2
541 556 first undecided set: 34
542 557 heads: 2
543 558 roots: 1
544 559 common: 2
545 560 missing: 32
546 561 common heads: 66f7d451a68b
547 562
548 563 % -- b -> a tree
549 564 comparing with a
550 565 searching for changes
551 566 unpruned common: 2dc09a01254d 66f7d451a68b
552 567 elapsed time: * seconds (glob)
553 568 round-trips: 4
569 queries: 5
554 570 heads summary:
555 571 total common heads: 1
556 572 also local heads: 0
557 573 also remote heads: 1
558 574 both: 0
559 575 local heads: 1
560 576 common: 0
561 577 missing: 1
562 578 remote heads: 2
563 579 common: 1
564 580 unknown: 1
565 581 local changesets: 34
566 582 common: 4
567 583 heads: 1
568 584 roots: 1
569 585 missing: 30
570 586 heads: 1
571 587 roots: 1
572 588 first undecided set: 30
573 589 heads: 1
574 590 roots: 1
575 591 common: 0
576 592 missing: 30
577 593 common heads: 2dc09a01254d
578 594
579 595 % -- b -> a set
580 596 comparing with a
581 597 query 1; heads
582 598 searching for changes
583 599 taking initial sample
584 600 searching: 2 queries
585 601 query 2; still undecided: 29, sample size is: 29
586 602 2 total queries in *.????s (glob)
587 603 elapsed time: * seconds (glob)
588 604 round-trips: 2
605 queries: 30
589 606 heads summary:
590 607 total common heads: 1
591 608 also local heads: 0
592 609 also remote heads: 1
593 610 both: 0
594 611 local heads: 1
595 612 common: 0
596 613 missing: 1
597 614 remote heads: 2
598 615 common: 1
599 616 unknown: 1
600 617 local changesets: 34
601 618 common: 4
602 619 heads: 1
603 620 roots: 1
604 621 missing: 30
605 622 heads: 1
606 623 roots: 1
607 624 first undecided set: 30
608 625 heads: 1
609 626 roots: 1
610 627 common: 0
611 628 missing: 30
612 629 common heads: 2dc09a01254d
613 630
614 631 % -- b -> a set (tip only)
615 632 comparing with a
616 633 query 1; heads
617 634 searching for changes
618 635 taking initial sample
619 636 searching: 2 queries
620 637 query 2; still undecided: 29, sample size is: 29
621 638 2 total queries in *.????s (glob)
622 639 elapsed time: * seconds (glob)
623 640 round-trips: 2
641 queries: 30
624 642 heads summary:
625 643 total common heads: 1
626 644 also local heads: 0
627 645 also remote heads: 1
628 646 both: 0
629 647 local heads: 1
630 648 common: 0
631 649 missing: 1
632 650 remote heads: 2
633 651 common: 1
634 652 unknown: 1
635 653 local changesets: 34
636 654 common: 4
637 655 heads: 1
638 656 roots: 1
639 657 missing: 30
640 658 heads: 1
641 659 roots: 1
642 660 first undecided set: 30
643 661 heads: 1
644 662 roots: 1
645 663 common: 0
646 664 missing: 30
647 665 common heads: 2dc09a01254d
648 666
649 667
650 668 Both many new:
651 669
652 670 $ testdesc '-ra' '-rb' '
653 671 > +2:f +30 :b
654 672 > <f +30 :a'
655 673
656 674 % -- a -> b tree
657 675 comparing with b
658 676 searching for changes
659 677 unpruned common: 66f7d451a68b
660 678 elapsed time: * seconds (glob)
661 679 round-trips: 4
680 queries: 5
662 681 heads summary:
663 682 total common heads: 1
664 683 also local heads: 0
665 684 also remote heads: 0
666 685 both: 0
667 686 local heads: 1
668 687 common: 0
669 688 missing: 1
670 689 remote heads: 1
671 690 common: 0
672 691 unknown: 1
673 692 local changesets: 32
674 693 common: 2
675 694 heads: 1
676 695 roots: 1
677 696 missing: 30
678 697 heads: 1
679 698 roots: 1
680 699 first undecided set: 32
681 700 heads: 1
682 701 roots: 1
683 702 common: 2
684 703 missing: 30
685 704 common heads: 66f7d451a68b
686 705
687 706 % -- a -> b set
688 707 comparing with b
689 708 query 1; heads
690 709 searching for changes
691 710 taking quick initial sample
692 711 searching: 2 queries
693 712 query 2; still undecided: 31, sample size is: 31
694 713 2 total queries in *.????s (glob)
695 714 elapsed time: * seconds (glob)
696 715 round-trips: 2
716 queries: 32
697 717 heads summary:
698 718 total common heads: 1
699 719 also local heads: 0
700 720 also remote heads: 0
701 721 both: 0
702 722 local heads: 1
703 723 common: 0
704 724 missing: 1
705 725 remote heads: 1
706 726 common: 0
707 727 unknown: 1
708 728 local changesets: 32
709 729 common: 2
710 730 heads: 1
711 731 roots: 1
712 732 missing: 30
713 733 heads: 1
714 734 roots: 1
715 735 first undecided set: 32
716 736 heads: 1
717 737 roots: 1
718 738 common: 2
719 739 missing: 30
720 740 common heads: 66f7d451a68b
721 741
722 742 % -- a -> b set (tip only)
723 743 comparing with b
724 744 query 1; heads
725 745 searching for changes
726 746 taking quick initial sample
727 747 searching: 2 queries
728 748 query 2; still undecided: 31, sample size is: 31
729 749 2 total queries in *.????s (glob)
730 750 elapsed time: * seconds (glob)
731 751 round-trips: 2
752 queries: 32
732 753 heads summary:
733 754 total common heads: 1
734 755 also local heads: 0
735 756 also remote heads: 0
736 757 both: 0
737 758 local heads: 1
738 759 common: 0
739 760 missing: 1
740 761 remote heads: 1
741 762 common: 0
742 763 unknown: 1
743 764 local changesets: 32
744 765 common: 2
745 766 heads: 1
746 767 roots: 1
747 768 missing: 30
748 769 heads: 1
749 770 roots: 1
750 771 first undecided set: 32
751 772 heads: 1
752 773 roots: 1
753 774 common: 2
754 775 missing: 30
755 776 common heads: 66f7d451a68b
756 777
757 778 % -- b -> a tree
758 779 comparing with a
759 780 searching for changes
760 781 unpruned common: 66f7d451a68b
761 782 elapsed time: * seconds (glob)
762 783 round-trips: 4
784 queries: 5
763 785 heads summary:
764 786 total common heads: 1
765 787 also local heads: 0
766 788 also remote heads: 0
767 789 both: 0
768 790 local heads: 1
769 791 common: 0
770 792 missing: 1
771 793 remote heads: 1
772 794 common: 0
773 795 unknown: 1
774 796 local changesets: 32
775 797 common: 2
776 798 heads: 1
777 799 roots: 1
778 800 missing: 30
779 801 heads: 1
780 802 roots: 1
781 803 first undecided set: 32
782 804 heads: 1
783 805 roots: 1
784 806 common: 2
785 807 missing: 30
786 808 common heads: 66f7d451a68b
787 809
788 810 % -- b -> a set
789 811 comparing with a
790 812 query 1; heads
791 813 searching for changes
792 814 taking quick initial sample
793 815 searching: 2 queries
794 816 query 2; still undecided: 31, sample size is: 31
795 817 2 total queries in *.????s (glob)
796 818 elapsed time: * seconds (glob)
797 819 round-trips: 2
820 queries: 32
798 821 heads summary:
799 822 total common heads: 1
800 823 also local heads: 0
801 824 also remote heads: 0
802 825 both: 0
803 826 local heads: 1
804 827 common: 0
805 828 missing: 1
806 829 remote heads: 1
807 830 common: 0
808 831 unknown: 1
809 832 local changesets: 32
810 833 common: 2
811 834 heads: 1
812 835 roots: 1
813 836 missing: 30
814 837 heads: 1
815 838 roots: 1
816 839 first undecided set: 32
817 840 heads: 1
818 841 roots: 1
819 842 common: 2
820 843 missing: 30
821 844 common heads: 66f7d451a68b
822 845
823 846 % -- b -> a set (tip only)
824 847 comparing with a
825 848 query 1; heads
826 849 searching for changes
827 850 taking quick initial sample
828 851 searching: 2 queries
829 852 query 2; still undecided: 31, sample size is: 31
830 853 2 total queries in *.????s (glob)
831 854 elapsed time: * seconds (glob)
832 855 round-trips: 2
856 queries: 32
833 857 heads summary:
834 858 total common heads: 1
835 859 also local heads: 0
836 860 also remote heads: 0
837 861 both: 0
838 862 local heads: 1
839 863 common: 0
840 864 missing: 1
841 865 remote heads: 1
842 866 common: 0
843 867 unknown: 1
844 868 local changesets: 32
845 869 common: 2
846 870 heads: 1
847 871 roots: 1
848 872 missing: 30
849 873 heads: 1
850 874 roots: 1
851 875 first undecided set: 32
852 876 heads: 1
853 877 roots: 1
854 878 common: 2
855 879 missing: 30
856 880 common heads: 66f7d451a68b
857 881
858 882
859 883 Both many new skewed:
860 884
861 885 $ testdesc '-ra' '-rb' '
862 886 > +2:f +30 :b
863 887 > <f +50 :a'
864 888
865 889 % -- a -> b tree
866 890 comparing with b
867 891 searching for changes
868 892 unpruned common: 66f7d451a68b
869 893 elapsed time: * seconds (glob)
870 894 round-trips: 4
895 queries: 5
871 896 heads summary:
872 897 total common heads: 1
873 898 also local heads: 0
874 899 also remote heads: 0
875 900 both: 0
876 901 local heads: 1
877 902 common: 0
878 903 missing: 1
879 904 remote heads: 1
880 905 common: 0
881 906 unknown: 1
882 907 local changesets: 52
883 908 common: 2
884 909 heads: 1
885 910 roots: 1
886 911 missing: 50
887 912 heads: 1
888 913 roots: 1
889 914 first undecided set: 52
890 915 heads: 1
891 916 roots: 1
892 917 common: 2
893 918 missing: 50
894 919 common heads: 66f7d451a68b
895 920
896 921 % -- a -> b set
897 922 comparing with b
898 923 query 1; heads
899 924 searching for changes
900 925 taking quick initial sample
901 926 searching: 2 queries
902 927 query 2; still undecided: 51, sample size is: 51
903 928 2 total queries in *.????s (glob)
904 929 elapsed time: * seconds (glob)
905 930 round-trips: 2
931 queries: 52
906 932 heads summary:
907 933 total common heads: 1
908 934 also local heads: 0
909 935 also remote heads: 0
910 936 both: 0
911 937 local heads: 1
912 938 common: 0
913 939 missing: 1
914 940 remote heads: 1
915 941 common: 0
916 942 unknown: 1
917 943 local changesets: 52
918 944 common: 2
919 945 heads: 1
920 946 roots: 1
921 947 missing: 50
922 948 heads: 1
923 949 roots: 1
924 950 first undecided set: 52
925 951 heads: 1
926 952 roots: 1
927 953 common: 2
928 954 missing: 50
929 955 common heads: 66f7d451a68b
930 956
931 957 % -- a -> b set (tip only)
932 958 comparing with b
933 959 query 1; heads
934 960 searching for changes
935 961 taking quick initial sample
936 962 searching: 2 queries
937 963 query 2; still undecided: 51, sample size is: 51
938 964 2 total queries in *.????s (glob)
939 965 elapsed time: * seconds (glob)
940 966 round-trips: 2
967 queries: 52
941 968 heads summary:
942 969 total common heads: 1
943 970 also local heads: 0
944 971 also remote heads: 0
945 972 both: 0
946 973 local heads: 1
947 974 common: 0
948 975 missing: 1
949 976 remote heads: 1
950 977 common: 0
951 978 unknown: 1
952 979 local changesets: 52
953 980 common: 2
954 981 heads: 1
955 982 roots: 1
956 983 missing: 50
957 984 heads: 1
958 985 roots: 1
959 986 first undecided set: 52
960 987 heads: 1
961 988 roots: 1
962 989 common: 2
963 990 missing: 50
964 991 common heads: 66f7d451a68b
965 992
966 993 % -- b -> a tree
967 994 comparing with a
968 995 searching for changes
969 996 unpruned common: 66f7d451a68b
970 997 elapsed time: * seconds (glob)
971 998 round-trips: 3
999 queries: 4
972 1000 heads summary:
973 1001 total common heads: 1
974 1002 also local heads: 0
975 1003 also remote heads: 0
976 1004 both: 0
977 1005 local heads: 1
978 1006 common: 0
979 1007 missing: 1
980 1008 remote heads: 1
981 1009 common: 0
982 1010 unknown: 1
983 1011 local changesets: 32
984 1012 common: 2
985 1013 heads: 1
986 1014 roots: 1
987 1015 missing: 30
988 1016 heads: 1
989 1017 roots: 1
990 1018 first undecided set: 32
991 1019 heads: 1
992 1020 roots: 1
993 1021 common: 2
994 1022 missing: 30
995 1023 common heads: 66f7d451a68b
996 1024
997 1025 % -- b -> a set
998 1026 comparing with a
999 1027 query 1; heads
1000 1028 searching for changes
1001 1029 taking quick initial sample
1002 1030 searching: 2 queries
1003 1031 query 2; still undecided: 31, sample size is: 31
1004 1032 2 total queries in *.????s (glob)
1005 1033 elapsed time: * seconds (glob)
1006 1034 round-trips: 2
1035 queries: 32
1007 1036 heads summary:
1008 1037 total common heads: 1
1009 1038 also local heads: 0
1010 1039 also remote heads: 0
1011 1040 both: 0
1012 1041 local heads: 1
1013 1042 common: 0
1014 1043 missing: 1
1015 1044 remote heads: 1
1016 1045 common: 0
1017 1046 unknown: 1
1018 1047 local changesets: 32
1019 1048 common: 2
1020 1049 heads: 1
1021 1050 roots: 1
1022 1051 missing: 30
1023 1052 heads: 1
1024 1053 roots: 1
1025 1054 first undecided set: 32
1026 1055 heads: 1
1027 1056 roots: 1
1028 1057 common: 2
1029 1058 missing: 30
1030 1059 common heads: 66f7d451a68b
1031 1060
1032 1061 % -- b -> a set (tip only)
1033 1062 comparing with a
1034 1063 query 1; heads
1035 1064 searching for changes
1036 1065 taking quick initial sample
1037 1066 searching: 2 queries
1038 1067 query 2; still undecided: 31, sample size is: 31
1039 1068 2 total queries in *.????s (glob)
1040 1069 elapsed time: * seconds (glob)
1041 1070 round-trips: 2
1071 queries: 32
1042 1072 heads summary:
1043 1073 total common heads: 1
1044 1074 also local heads: 0
1045 1075 also remote heads: 0
1046 1076 both: 0
1047 1077 local heads: 1
1048 1078 common: 0
1049 1079 missing: 1
1050 1080 remote heads: 1
1051 1081 common: 0
1052 1082 unknown: 1
1053 1083 local changesets: 32
1054 1084 common: 2
1055 1085 heads: 1
1056 1086 roots: 1
1057 1087 missing: 30
1058 1088 heads: 1
1059 1089 roots: 1
1060 1090 first undecided set: 32
1061 1091 heads: 1
1062 1092 roots: 1
1063 1093 common: 2
1064 1094 missing: 30
1065 1095 common heads: 66f7d451a68b
1066 1096
1067 1097
1068 1098 Both many new on top of long history:
1069 1099
1070 1100 $ testdesc '-ra' '-rb' '
1071 1101 > +1000:f +30 :b
1072 1102 > <f +50 :a'
1073 1103
1074 1104 % -- a -> b tree
1075 1105 comparing with b
1076 1106 searching for changes
1077 1107 unpruned common: 7ead0cba2838
1078 1108 elapsed time: * seconds (glob)
1079 1109 round-trips: 4
1110 queries: 5
1080 1111 heads summary:
1081 1112 total common heads: 1
1082 1113 also local heads: 0
1083 1114 also remote heads: 0
1084 1115 both: 0
1085 1116 local heads: 1
1086 1117 common: 0
1087 1118 missing: 1
1088 1119 remote heads: 1
1089 1120 common: 0
1090 1121 unknown: 1
1091 1122 local changesets: 1050
1092 1123 common: 1000
1093 1124 heads: 1
1094 1125 roots: 1
1095 1126 missing: 50
1096 1127 heads: 1
1097 1128 roots: 1
1098 1129 first undecided set: 1050
1099 1130 heads: 1
1100 1131 roots: 1
1101 1132 common: 1000
1102 1133 missing: 50
1103 1134 common heads: 7ead0cba2838
1104 1135
1105 1136 % -- a -> b set
1106 1137 comparing with b
1107 1138 query 1; heads
1108 1139 searching for changes
1109 1140 taking quick initial sample
1110 1141 searching: 2 queries
1111 1142 query 2; still undecided: 1049, sample size is: 11
1112 1143 sampling from both directions
1113 1144 searching: 3 queries
1114 1145 query 3; still undecided: 31, sample size is: 31
1115 1146 3 total queries in *.????s (glob)
1116 1147 elapsed time: * seconds (glob)
1117 1148 round-trips: 3
1149 queries: 43
1118 1150 heads summary:
1119 1151 total common heads: 1
1120 1152 also local heads: 0
1121 1153 also remote heads: 0
1122 1154 both: 0
1123 1155 local heads: 1
1124 1156 common: 0
1125 1157 missing: 1
1126 1158 remote heads: 1
1127 1159 common: 0
1128 1160 unknown: 1
1129 1161 local changesets: 1050
1130 1162 common: 1000
1131 1163 heads: 1
1132 1164 roots: 1
1133 1165 missing: 50
1134 1166 heads: 1
1135 1167 roots: 1
1136 1168 first undecided set: 1050
1137 1169 heads: 1
1138 1170 roots: 1
1139 1171 common: 1000
1140 1172 missing: 50
1141 1173 common heads: 7ead0cba2838
1142 1174
1143 1175 % -- a -> b set (tip only)
1144 1176 comparing with b
1145 1177 query 1; heads
1146 1178 searching for changes
1147 1179 taking quick initial sample
1148 1180 searching: 2 queries
1149 1181 query 2; still undecided: 1049, sample size is: 11
1150 1182 sampling from both directions
1151 1183 searching: 3 queries
1152 1184 query 3; still undecided: 31, sample size is: 31
1153 1185 3 total queries in *.????s (glob)
1154 1186 elapsed time: * seconds (glob)
1155 1187 round-trips: 3
1188 queries: 43
1156 1189 heads summary:
1157 1190 total common heads: 1
1158 1191 also local heads: 0
1159 1192 also remote heads: 0
1160 1193 both: 0
1161 1194 local heads: 1
1162 1195 common: 0
1163 1196 missing: 1
1164 1197 remote heads: 1
1165 1198 common: 0
1166 1199 unknown: 1
1167 1200 local changesets: 1050
1168 1201 common: 1000
1169 1202 heads: 1
1170 1203 roots: 1
1171 1204 missing: 50
1172 1205 heads: 1
1173 1206 roots: 1
1174 1207 first undecided set: 1050
1175 1208 heads: 1
1176 1209 roots: 1
1177 1210 common: 1000
1178 1211 missing: 50
1179 1212 common heads: 7ead0cba2838
1180 1213
1181 1214 % -- b -> a tree
1182 1215 comparing with a
1183 1216 searching for changes
1184 1217 unpruned common: 7ead0cba2838
1185 1218 elapsed time: * seconds (glob)
1186 1219 round-trips: 3
1220 queries: 4
1187 1221 heads summary:
1188 1222 total common heads: 1
1189 1223 also local heads: 0
1190 1224 also remote heads: 0
1191 1225 both: 0
1192 1226 local heads: 1
1193 1227 common: 0
1194 1228 missing: 1
1195 1229 remote heads: 1
1196 1230 common: 0
1197 1231 unknown: 1
1198 1232 local changesets: 1030
1199 1233 common: 1000
1200 1234 heads: 1
1201 1235 roots: 1
1202 1236 missing: 30
1203 1237 heads: 1
1204 1238 roots: 1
1205 1239 first undecided set: 1030
1206 1240 heads: 1
1207 1241 roots: 1
1208 1242 common: 1000
1209 1243 missing: 30
1210 1244 common heads: 7ead0cba2838
1211 1245
1212 1246 % -- b -> a set
1213 1247 comparing with a
1214 1248 query 1; heads
1215 1249 searching for changes
1216 1250 taking quick initial sample
1217 1251 searching: 2 queries
1218 1252 query 2; still undecided: 1029, sample size is: 11
1219 1253 sampling from both directions
1220 1254 searching: 3 queries
1221 1255 query 3; still undecided: 15, sample size is: 15
1222 1256 3 total queries in *.????s (glob)
1223 1257 elapsed time: * seconds (glob)
1224 1258 round-trips: 3
1259 queries: 27
1225 1260 heads summary:
1226 1261 total common heads: 1
1227 1262 also local heads: 0
1228 1263 also remote heads: 0
1229 1264 both: 0
1230 1265 local heads: 1
1231 1266 common: 0
1232 1267 missing: 1
1233 1268 remote heads: 1
1234 1269 common: 0
1235 1270 unknown: 1
1236 1271 local changesets: 1030
1237 1272 common: 1000
1238 1273 heads: 1
1239 1274 roots: 1
1240 1275 missing: 30
1241 1276 heads: 1
1242 1277 roots: 1
1243 1278 first undecided set: 1030
1244 1279 heads: 1
1245 1280 roots: 1
1246 1281 common: 1000
1247 1282 missing: 30
1248 1283 common heads: 7ead0cba2838
1249 1284
1250 1285 % -- b -> a set (tip only)
1251 1286 comparing with a
1252 1287 query 1; heads
1253 1288 searching for changes
1254 1289 taking quick initial sample
1255 1290 searching: 2 queries
1256 1291 query 2; still undecided: 1029, sample size is: 11
1257 1292 sampling from both directions
1258 1293 searching: 3 queries
1259 1294 query 3; still undecided: 15, sample size is: 15
1260 1295 3 total queries in *.????s (glob)
1261 1296 elapsed time: * seconds (glob)
1262 1297 round-trips: 3
1298 queries: 27
1263 1299 heads summary:
1264 1300 total common heads: 1
1265 1301 also local heads: 0
1266 1302 also remote heads: 0
1267 1303 both: 0
1268 1304 local heads: 1
1269 1305 common: 0
1270 1306 missing: 1
1271 1307 remote heads: 1
1272 1308 common: 0
1273 1309 unknown: 1
1274 1310 local changesets: 1030
1275 1311 common: 1000
1276 1312 heads: 1
1277 1313 roots: 1
1278 1314 missing: 30
1279 1315 heads: 1
1280 1316 roots: 1
1281 1317 first undecided set: 1030
1282 1318 heads: 1
1283 1319 roots: 1
1284 1320 common: 1000
1285 1321 missing: 30
1286 1322 common heads: 7ead0cba2838
1287 1323
1288 1324
1289 1325 One with >200 heads. We now switch to sending them all in the initial roundtrip, but still do sampling for the later requests.
1290 1326
1291 1327 $ hg init manyheads
1292 1328 $ cd manyheads
1293 1329 $ echo "+300:r @a" >dagdesc
1294 1330 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1295 1331 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1296 1332 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1297 1333 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1298 1334 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1299 1335 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1300 1336 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1301 1337 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1302 1338 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1303 1339 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1304 1340 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1305 1341 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1306 1342 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1307 1343 $ echo "@b *r+3" >>dagdesc # one more head
1308 1344 $ hg debugbuilddag <dagdesc
1309 1345 reading DAG from stdin
1310 1346
1311 1347 $ hg heads -t --template . | wc -c
1312 1348 \s*261 (re)
1313 1349
1314 1350 $ hg clone -b a . a
1315 1351 adding changesets
1316 1352 adding manifests
1317 1353 adding file changes
1318 1354 added 1340 changesets with 0 changes to 0 files (+259 heads)
1319 1355 new changesets 1ea73414a91b:1c51e2c80832
1320 1356 updating to branch a
1321 1357 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1322 1358 $ hg clone -b b . b
1323 1359 adding changesets
1324 1360 adding manifests
1325 1361 adding file changes
1326 1362 added 304 changesets with 0 changes to 0 files
1327 1363 new changesets 1ea73414a91b:513314ca8b3a
1328 1364 updating to branch b
1329 1365 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1330 1366
1331 1367 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1332 1368 comparing with b
1333 1369 query 1; heads
1334 1370 searching for changes
1335 1371 taking quick initial sample
1336 1372 searching: 2 queries
1337 1373 query 2; still undecided: 1080, sample size is: 50
1338 1374 sampling from both directions
1339 1375 searching: 3 queries
1340 1376 query 3; still undecided: 1030, sample size is: 200
1341 1377 sampling from both directions
1342 1378 searching: 4 queries
1343 1379 query 4; still undecided: 547, sample size is: 210
1344 1380 sampling from both directions
1345 1381 searching: 5 queries
1346 1382 query 5; still undecided: 336, sample size is: 220
1347 1383 sampling from both directions
1348 1384 searching: 6 queries
1349 1385 query 6; still undecided: 114, sample size is: 114
1350 1386 6 total queries in *.????s (glob)
1351 1387 elapsed time: * seconds (glob)
1352 1388 round-trips: 6
1389 queries: 1054
1353 1390 heads summary:
1354 1391 total common heads: 1
1355 1392 also local heads: 0
1356 1393 also remote heads: 0
1357 1394 both: 0
1358 1395 local heads: 260
1359 1396 common: 0
1360 1397 missing: 260
1361 1398 remote heads: 1
1362 1399 common: 0
1363 1400 unknown: 1
1364 1401 local changesets: 1340
1365 1402 common: 300
1366 1403 heads: 1
1367 1404 roots: 1
1368 1405 missing: 1040
1369 1406 heads: 260
1370 1407 roots: 260
1371 1408 first undecided set: 1340
1372 1409 heads: 260
1373 1410 roots: 1
1374 1411 common: 300
1375 1412 missing: 1040
1376 1413 common heads: 3ee37d65064a
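The new "queries:" total can be cross-checked against the trace above. A
minimal sketch of the accounting, assuming (this model is an assumption for
illustration, not Mercurial's actual code) that it sums the nodes probed in
the initial heads/known roundtrip with every later sample size:

    # Hypothetical model of the "queries:" counter: one query per node
    # whose presence we ask the server about.
    def total_queries(initial_probe, sample_sizes):
        return initial_probe + sum(sample_sizes)

    # The run above sent all 260 local heads in the first roundtrip
    # (the >200-heads behaviour), then samples of 50, 200, 210, 220, 114:
    assert total_queries(260, [50, 200, 210, 220, 114]) == 1054

The same arithmetic matches the other totals in this file, e.g. 1 + 9 + 3 = 13
for the --rev tip run just below.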
1377 1414 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1378 1415 comparing with b
1379 1416 query 1; heads
1380 1417 searching for changes
1381 1418 taking quick initial sample
1382 1419 searching: 2 queries
1383 1420 query 2; still undecided: 303, sample size is: 9
1384 1421 sampling from both directions
1385 1422 searching: 3 queries
1386 1423 query 3; still undecided: 3, sample size is: 3
1387 1424 3 total queries in *.????s (glob)
1388 1425 elapsed time: * seconds (glob)
1389 1426 round-trips: 3
1427 queries: 13
1390 1428 heads summary:
1391 1429 total common heads: 1
1392 1430 also local heads: 0
1393 1431 also remote heads: 0
1394 1432 both: 0
1395 1433 local heads: 260
1396 1434 common: 0
1397 1435 missing: 260
1398 1436 remote heads: 1
1399 1437 common: 0
1400 1438 unknown: 1
1401 1439 local changesets: 1340
1402 1440 common: 300
1403 1441 heads: 1
1404 1442 roots: 1
1405 1443 missing: 1040
1406 1444 heads: 260
1407 1445 roots: 260
1408 1446 first undecided set: 1340
1409 1447 heads: 260
1410 1448 roots: 1
1411 1449 common: 300
1412 1450 missing: 1040
1413 1451 common heads: 3ee37d65064a
1414 1452
1415 1453 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1416 1454 comparing with b
1417 1455 searching for changes
1418 1456 sampling from both directions
1419 1457 query 1; still undecided: 1340, sample size is: 50
1420 1458 sampling from both directions
1421 1459 query 2; still undecided: 995, sample size is: 60
1422 1460 sampling from both directions
1423 1461 query 3; still undecided: 913, sample size is: 72
1424 1462 sampling from both directions
1425 1463 query 4; still undecided: 816, sample size is: 204
1426 1464 sampling from both directions
1427 1465 query 5; still undecided: 612, sample size is: 153
1428 1466 sampling from both directions
1429 1467 query 6; still undecided: 456, sample size is: 123
1430 1468 sampling from both directions
1431 1469 query 7; still undecided: 332, sample size is: 147
1432 1470 sampling from both directions
1433 1471 query 8; still undecided: 184, sample size is: 176
1434 1472 sampling from both directions
1435 1473 query 9; still undecided: 8, sample size is: 8
1436 1474 9 total queries in *s (glob)
1437 1475 elapsed time: * seconds (glob)
1438 1476 round-trips: 9
1477 queries: 993
1439 1478 heads summary:
1440 1479 total common heads: 1
1441 1480 also local heads: 0
1442 1481 also remote heads: 0
1443 1482 both: 0
1444 1483 local heads: 260
1445 1484 common: 0
1446 1485 missing: 260
1447 1486 remote heads: 1
1448 1487 common: 0
1449 1488 unknown: 1
1450 1489 local changesets: 1340
1451 1490 common: 300
1452 1491 heads: 1
1453 1492 roots: 1
1454 1493 missing: 1040
1455 1494 heads: 260
1456 1495 roots: 260
1457 1496 first undecided set: 1340
1458 1497 heads: 260
1459 1498 roots: 1
1460 1499 common: 300
1461 1500 missing: 1040
1462 1501 common heads: 3ee37d65064a
1463 1502
1464 1503 Test actual protocol when pulling one new head in addition to common heads
1465 1504
1466 1505 $ hg clone -U b c
1467 1506 $ hg -R c id -ir tip
1468 1507 513314ca8b3a
1469 1508 $ hg -R c up -qr default
1470 1509 $ touch c/f
1471 1510 $ hg -R c ci -Aqm "extra head"
1472 1511 $ hg -R c id -i
1473 1512 e64a39e7da8b
1474 1513
1475 1514 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1476 1515 $ cat hg.pid >> $DAEMON_PIDS
1477 1516
1478 1517 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1479 1518 comparing with http://localhost:$HGPORT/
1480 1519 searching for changes
1481 1520 e64a39e7da8b
1482 1521
1483 1522 $ killdaemons.py
1484 1523 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1485 1524 "GET /?cmd=capabilities HTTP/1.1" 200 -
1486 1525 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1487 1526 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1488 1527 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1489 1528 $ cat errors.log
1490 1529
1491 1530 $ cd ..
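The cmd=batch request above is discovery's first roundtrip: the "heads" and
"known" commands ride a single HTTP request, separated by ';' inside the
x-hgarg-1 value. A standalone decoding of that value (standard library only;
this snippet is illustration, not part of the test):

    from urllib.parse import unquote_plus

    arg = ("cmds=heads+%3Bknown+nodes%3D"
           "513314ca8b3ae4dac8eec56966265b00fcf866db")
    # '+' decodes to a space, %3B to ';', %3D to '='
    print(unquote_plus(arg))
    # cmds=heads ;known nodes=513314ca8b3ae4dac8eec56966265b00fcf866db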
1492 1531
1493 1532
1494 1533 Issue 4438 - test coverage for the issues fixed by 3ef893520a85.
1495 1534
1496 1535 $ mkdir issue4438
1497 1536 $ cd issue4438
1498 1537 #if false
1499 1538 generate new bundles:
1500 1539 $ hg init r1
1501 1540 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1502 1541 $ hg clone -q r1 r2
1503 1542 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1504 1543 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1505 1544 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1506 1545 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1507 1546 #else
1508 1547 use existing bundles:
1509 1548 $ hg init r1
1510 1549 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1511 1550 $ hg -R r1 -q up
1512 1551 $ hg init r2
1513 1552 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1514 1553 $ hg -R r2 -q up
1515 1554 #endif
1516 1555
1517 1556 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1518 1557
1519 1558 $ hg -R r1 outgoing r2 -T'{rev} '
1520 1559 comparing with r2
1521 1560 searching for changes
1522 1561 101 102 103 104 105 106 107 108 109 110 (no-eol)
1523 1562
1524 1563 The case where all of the 'initialsamplesize' samples were already common would
1525 1564 give 'all remote heads known locally' without checking the remaining heads -
1526 1565 fixed in 86c35b7ae300 (see the sketch after this test):
1527 1566
1528 1567 $ cat >> r1/.hg/hgrc << EOF
1529 1568 > [devel]
1530 1569 > discovery.randomize = False
1531 1570 > EOF
1532 1571
1533 1572 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1534 1573 > --config blackbox.track='command commandfinish discovery'
1535 1574 comparing with r2
1536 1575 searching for changes
1537 1576 101 102 103 104 105 106 107 108 109 110 (no-eol)
1538 1577 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1539 1578 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --no-profile --cmdserver chgunix * (glob) (chg !)
1540 1579 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1541 1580 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1542 1581 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1543 1582 $ cd ..
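A sketch of the pitfall that 86c35b7ae300 closed, as referenced above (names
and structure are made up for illustration; this is not Mercurial's code):

    # The conclusion "all remote heads known locally" must be drawn
    # from the remote heads themselves ...
    def all_remote_heads_known(remote_heads, locally_known):
        return all(h in locally_known for h in remote_heads)

    # ... not from a sample that happened to be fully common: a
    # fully-known *sample* proves nothing about heads never sampled.
    def buggy_shortcut(sample, locally_known):
        return all(n in locally_known for n in sample)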
1544 1583
1545 1584 Even if the set of revs to discover is restricted, unrelated revs may be
1546 1585 returned as common heads: below, 66f7d451a68b shows up even though it is not an ancestor of the requested fa942426a6fd.
1547 1586
1548 1587 $ mkdir ancestorsof
1549 1588 $ cd ancestorsof
1550 1589 $ hg init a
1551 1590 $ hg clone a b -q
1552 1591 $ cd b
1553 1592 $ hg debugbuilddag '.:root *root *root'
1554 1593 $ hg log -G -T '{node|short}'
1555 1594 o fa942426a6fd
1556 1595 |
1557 1596 | o 66f7d451a68b
1558 1597 |/
1559 1598 o 1ea73414a91b
1560 1599
1561 1600 $ hg push -r 66f7d451a68b -q
1562 1601 $ hg debugdiscovery --verbose --rev fa942426a6fd
1563 1602 comparing with $TESTTMP/ancestorsof/a
1564 1603 searching for changes
1565 1604 elapsed time: * seconds (glob)
1566 1605 round-trips: 1
1606 queries: 1
1567 1607 heads summary:
1568 1608 total common heads: 1
1569 1609 also local heads: 1
1570 1610 also remote heads: 1
1571 1611 both: 1
1572 1612 local heads: 2
1573 1613 common: 1
1574 1614 missing: 1
1575 1615 remote heads: 1
1576 1616 common: 1
1577 1617 unknown: 0
1578 1618 local changesets: 3
1579 1619 common: 2
1580 1620 heads: 1
1581 1621 roots: 1
1582 1622 missing: 1
1583 1623 heads: 1
1584 1624 roots: 1
1585 1625 first undecided set: 1
1586 1626 heads: 1
1587 1627 roots: 1
1588 1628 common: 0
1589 1629 missing: 1
1590 1630 common heads: 66f7d451a68b
1591 1631
1592 1632 $ cd ..
1593 1633
1594 1634
1595 1635 Test debugging discovery using different subsets of the same repository
1596 1636 =======================================================================
1597 1637
1598 1638 remote is a local subset
1599 1639 ------------------------
1600 1640
1601 1641 remote will be the last 25 heads of the local graph
1602 1642
1603 1643 $ cd $TESTTMP/manyheads
1604 1644 $ hg -R a debugdiscovery \
1605 1645 > --debug \
1606 1646 > --remote-as-revs 'last(heads(all()), 25)' \
1607 1647 > --config devel.discovery.randomize=false
1608 1648 query 1; heads
1609 1649 searching for changes
1610 1650 all remote heads known locally
1611 1651 elapsed time: * seconds (glob)
1612 1652 round-trips: 1
1653 queries: 260
1613 1654 heads summary:
1614 1655 total common heads: 25
1615 1656 also local heads: 25
1616 1657 also remote heads: 25
1617 1658 both: 25
1618 1659 local heads: 260
1619 1660 common: 25
1620 1661 missing: 235
1621 1662 remote heads: 25
1622 1663 common: 25
1623 1664 unknown: 0
1624 1665 local changesets: 1340
1625 1666 common: 400
1626 1667 heads: 25
1627 1668 roots: 1
1628 1669 missing: 940
1629 1670 heads: 235
1630 1671 roots: 235
1631 1672 first undecided set: 940
1632 1673 heads: 235
1633 1674 roots: 235
1634 1675 common: 0
1635 1676 missing: 940
1636 1677 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1637 1678
1638 1679 local is a local subset
1639 1680 ------------------------
1640 1681
1641 1682 local will be the first 25 heads of the local graph
1642 1683
1643 1684 $ cd $TESTTMP/manyheads
1644 1685 $ hg -R a debugdiscovery b \
1645 1686 > --debug \
1646 1687 > --local-as-revs 'first(heads(all()), 25)' \
1647 1688 > --config devel.discovery.randomize=false
1648 1689 comparing with b
1649 1690 query 1; heads
1650 1691 searching for changes
1651 1692 taking quick initial sample
1652 1693 query 2; still undecided: 375, sample size is: 81
1653 1694 sampling from both directions
1654 1695 query 3; still undecided: 3, sample size is: 3
1655 1696 3 total queries in *s (glob)
1656 1697 elapsed time: * seconds (glob)
1657 1698 round-trips: 3
1699 queries: 109
1658 1700 heads summary:
1659 1701 total common heads: 1
1660 1702 also local heads: 0
1661 1703 also remote heads: 0
1662 1704 both: 0
1663 1705 local heads: 25
1664 1706 common: 0
1665 1707 missing: 25
1666 1708 remote heads: 1
1667 1709 common: 0
1668 1710 unknown: 1
1669 1711 local changesets: 400
1670 1712 common: 300
1671 1713 heads: 1
1672 1714 roots: 1
1673 1715 missing: 100
1674 1716 heads: 25
1675 1717 roots: 25
1676 1718 first undecided set: 400
1677 1719 heads: 25
1678 1720 roots: 1
1679 1721 common: 300
1680 1722 missing: 100
1681 1723 common heads: 3ee37d65064a
1682 1724
1683 1725 both local and remote are subsets
1684 1726 ---------------------------------
1685 1727
1686 1728 local will be the first 25 heads and remote the last 25 heads of the local graph
1687 1729
1688 1730 $ cd $TESTTMP/manyheads
1689 1731 $ hg -R a debugdiscovery \
1690 1732 > --debug \
1691 1733 > --local-as-revs 'first(heads(all()), 25)' \
1692 1734 > --remote-as-revs 'last(heads(all()), 25)' \
1693 1735 > --config devel.discovery.randomize=false
1694 1736 query 1; heads
1695 1737 searching for changes
1696 1738 taking quick initial sample
1697 1739 query 2; still undecided: 375, sample size is: 81
1698 1740 sampling from both directions
1699 1741 query 3; still undecided: 3, sample size is: 3
1700 1742 3 total queries in *s (glob)
1701 1743 elapsed time: * seconds (glob)
1702 1744 round-trips: 3
1745 queries: 109
1703 1746 heads summary:
1704 1747 total common heads: 1
1705 1748 also local heads: 0
1706 1749 also remote heads: 0
1707 1750 both: 0
1708 1751 local heads: 25
1709 1752 common: 0
1710 1753 missing: 25
1711 1754 remote heads: 25
1712 1755 common: 0
1713 1756 unknown: 25
1714 1757 local changesets: 400
1715 1758 common: 300
1716 1759 heads: 1
1717 1760 roots: 1
1718 1761 missing: 100
1719 1762 heads: 25
1720 1763 roots: 25
1721 1764 first undecided set: 400
1722 1765 heads: 25
1723 1766 roots: 1
1724 1767 common: 300
1725 1768 missing: 100
1726 1769 common heads: 3ee37d65064a
1727 1770
1728 1771 Test -T json output
1729 1772 -------------------
1730 1773
1731 1774 $ hg -R a debugdiscovery \
1732 1775 > -T json \
1733 1776 > --debug \
1734 1777 > --local-as-revs 'first(heads(all()), 25)' \
1735 1778 > --remote-as-revs 'last(heads(all()), 25)' \
1736 1779 > --config devel.discovery.randomize=false
1737 1780 [
1738 1781 {
1739 1782 "elapsed": *, (glob)
1740 1783 "nb-common-heads": 1,
1741 1784 "nb-common-heads-both": 0,
1742 1785 "nb-common-heads-local": 0,
1743 1786 "nb-common-heads-remote": 0,
1744 1787 "nb-common-roots": 1,
1745 1788 "nb-head-local": 25,
1746 1789 "nb-head-local-missing": 25,
1747 1790 "nb-head-remote": 25,
1748 1791 "nb-head-remote-unknown": 25,
1749 1792 "nb-ini_und": 400,
1750 1793 "nb-ini_und-common": 300,
1751 1794 "nb-ini_und-heads": 25,
1752 1795 "nb-ini_und-missing": 100,
1753 1796 "nb-ini_und-roots": 1,
1754 1797 "nb-missing-heads": 25,
1755 1798 "nb-missing-roots": 25,
1756 1799 "nb-revs": 400,
1757 1800 "nb-revs-common": 300,
1758 1801 "nb-revs-missing": 100,
1759 1802 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1803 "total-queries": 109,
1760 1804 "total-roundtrips": 3
1761 1805 }
1762 1806 ]
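A consumer-side sketch (illustration only, not part of the test run): the JSON
form makes the new counter easy to scrape, using only keys visible in the
output above:

    import json
    import subprocess

    # assumes repositories "a" and "b" as built earlier in this test
    out = subprocess.check_output(
        ['hg', '-R', 'a', 'debugdiscovery', 'b', '-T', 'json'])
    stats = json.loads(out)[0]
    print(stats['total-roundtrips'], stats['total-queries'])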