##// END OF EJS Templates
debugdiscovery: display some information about the initial "undecided" set...
marmoute -
r46693:0e5065b6 default
parent child Browse files
Show More
@@ -1,4634 +1,4652
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 revlog,
73 73 revset,
74 74 revsetlang,
75 75 scmutil,
76 76 setdiscovery,
77 77 simplemerge,
78 78 sshpeer,
79 79 sslutil,
80 80 streamclone,
81 81 strip,
82 82 tags as tagsmod,
83 83 templater,
84 84 treediscovery,
85 85 upgrade,
86 86 url as urlmod,
87 87 util,
88 88 vfs as vfsmod,
89 89 wireprotoframing,
90 90 wireprotoserver,
91 91 wireprotov2peer,
92 92 )
93 93 from .utils import (
94 94 cborutil,
95 95 compression,
96 96 dateutil,
97 97 procutil,
98 98 stringutil,
99 99 )
100 100
101 101 from .revlogutils import (
102 102 deltas as deltautil,
103 103 nodemap,
104 104 sidedata,
105 105 )
106 106
# Convenience alias used by the debug commands below.
release = lockmod.release

# Command table for this module; seeded with strip's commands so that
# registering against `command` extends a single shared table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
112 112
113 113
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given; no repository is required.
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
133 133
134 134
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the (harmless) EICAR signature into the repo's cache directory;
    # an active on-access scanner will typically react to it.
    # NOTE(review): the path is a native str while the mode is bytes —
    # presumably the vfs layer accepts both; confirm before normalizing.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
150 150
151 151
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path, decode it, and replay it into the local repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
158 158
159 159
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a brand new repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count node events so we can size the
    # progress bar and the mergeable-file contents.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually commit one changeset per node event.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev number of the most recently created node
        atbranch = b'default'
        nodeids = []  # node id per DAG id, for backref resolution
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file so the
                        # result has real merge content
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this rev's line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry the second parent's nf* files across merges
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve parents: negative/empty backrefs mean null parent
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out after the loop
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
335 335
336 336
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of the changegroup unbundler 'gen'

    With 'all', print full delta metadata (node, parents, cset, delta base,
    delta length) for the changelog, the manifest and every filelog;
    otherwise print only the node of each changelog entry.  'indent'
    prefixes each output line (used when nested in bundle2 output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # one section per revlog: a header line, then one line per delta
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections repeat until the empty ({}) terminator
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
376 376
377 377
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # the markers were encoded with a format this client cannot parse;
        # report it rather than aborting the whole bundle dump
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # sorted for stable, diffable output
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
400 400
401 401
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data', one per line"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
410 410
411 411
def _quasirepr(thing):
    """repr()-like byte string with deterministic (sorted) dict rendering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
418 418
419 419
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only dump parts whose type was requested via
    # --part-type (empty list means show everything)
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a detailed, indented dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
442 442
443 443
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; do not inspect the payload
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            # bundle2 has its own structured dumper
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
466 466
467 467
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # top-level wire protocol capabilities, sorted for stable output
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    # bundle2 capabilities are a nested key -> values mapping
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
484 484
485 485
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # files information is stored in the changelog's sidedata block
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is not None:
        files = metadata.decode_files_sidedata(sd)
        for f in sorted(files.touched):
            # the categories below are checked in priority order; a file
            # falls into the first one that matches
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # annotate copy information when the file was copied from a
            # parent (p1 or p2) of this revision
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
518 518
519 519
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # first direction: every tracked dirstate entry must be consistent
    # with the parent manifests for its state
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # second direction: every file in manifest1 must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
548 548
549 549
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
562 562
563 563
def _debugdisplaycolor(ui):
    """print every available color label, each rendered in its own effect"""
    # work on a copy so the caller's ui styles are untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: 'color.*' and 'terminfo.*' config keys define
        # additional named styles
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
580 580
581 581
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # column width so the effect lists line up
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(rendered)
        ui.write(b'\n')
595 595
596 596
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out — warn instead of silently leaking them
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
618 618
619 619
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: emit its DAG, labeling requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) for nodes and ('l', ...) for labels
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names, used as labels below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event when the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
689 689
690 690
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the storage is implied, so the single positional
        # argument is actually the revision, not a file
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: no flag processing, exactly what is stored
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
706 706
707 707
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the less common date formats
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
726 726
727 727
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # classify the delta of 'rev' and measure its chain
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; compare against parents/self to name it
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the previous rev
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain bases are numbered in order of first appearance
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against division by zero on empty revisions
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain to measure I/O efficiency
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
909 909
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    # each dirstate entry 'ent' is indexed as: 0 state, 1 mode, 2 size, 3 mtime
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the recorded mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953 953
954 954
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    Compares the local repository with the peer at OTHER (the configured
    default path when omitted) and prints statistics about the run:
    elapsed time, common/missing heads and roots, and the size of the
    initial "undecided" set the discovery algorithm started from.
    """
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # do not shadow the `any` builtin with the "has common" flag
            common, hascommon, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    # the set discovery starts from: everything not already decided by the
    # locally-known common heads
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    # fix: the heads/roots counts of the missing set were swapped
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b"    also local heads:  %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b"    both:              %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b"    common:            %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    missing:           %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b"    common:            %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b"    unknown:           %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
1094 1112
1095 1113
# copy-buffer size (4 KiB) used by debugdownload's read/write loop
_chunksize = 4 << 10
1097 1115
1098 1116
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # stream into the requested file, or echo through the ui when no
    # destination was given
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        while True:
            chunk = fh.read(_chunksize)
            if not chunk:
                break
            dest.write(chunk)
    finally:
        if output:
            dest.close()
1121 1139
1122 1140
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions

    Emits one formatter item per loaded extension, sorted by name. In
    default mode the name is annotated with its tested-with status for the
    running Mercurial version; --verbose additionally reports the source
    location, bundled status, tested-with versions and bug reporting link.
    '''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the module on disk; with no __file__ under an "oxidized"
        # build, point at the executable instead
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with compatibility info for this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1184 1202
1185 1203
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Runs the expression through the parse/analyze/optimize pipeline
    (individual stages can be dumped with -p), builds a matcher from it,
    and prints every candidate file name the matcher accepts.
    '''
    from . import fileset

    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # ordered compilation stages; each may be selected with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against:
    # all revisions with --all-files, else the target revision (or the
    # working directory, including unknown/ignored files)
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1281 1299
1282 1300
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; byte strings pass
            # through unchanged
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels according to whether repo, config and default agree
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1353 1371
1354 1372
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # render a boolean probe result the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe with a throwaway temp file inside the target directory
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1377 1395
1378 1396
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # translate the hex ids given on the command line into binary nodes
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name onto an internal bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1425 1443
1426 1444
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file is ignored either directly, or because one of its
                # parent directories matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which rule (file + line) produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1475 1493
1476 1494
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    fmtnode = hex if ui.debugflag else short

    # width of a rendered node id; probe the first revision when one exists
    idlen = 12
    for rev in store:
        idlen = len(fmtnode(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', fmtnode(node))
        fm.write(b'p1', b'%s ', fmtnode(parents[0]))
        fm.write(b'p2', b'%s', fmtnode(parents[1]))
        fm.plain(b'\n')

    fm.end()
1516 1534
1517 1535
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # one edge per parent; the null second parent is omitted
        parents = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1536 1554
1537 1555
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the changelog index once before querying its stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1547 1565
1548 1566
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a sequence of environment checks (encoding, Python, compiled
    modules, compression engines, templates, editor, username) and lets
    loaded extensions contribute their own. Each failed check increments
    a problem counter which is reported and returned.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # locate the stdlib; with no __file__ under an oxidized build, fall
    # back to the executable path
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # probe for the optional Rust extensions; absence is reported below,
    # not treated as a problem
    try:
        from . import rustext

        rustext.__doc__ # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b'  TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b'  SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # when a compiled module policy is in effect, actually try to import
    # the C and/or Rust accelerator modules
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import ( # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import ( # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    # compression engines: registered, available, and usable on the wire
    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # give extensions a chance to run their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1847 1865
1848 1866
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # one flag per requested id, printed in the order they were given
    flags = peer.known([bin(s) for s in ids])
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
1862 1880
1863 1881
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: completion data is produced by debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1868 1886
1869 1887
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-freeing simply deletes the lock file(s) and exits; no other
    # mode is combined with it.
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    locks = []
    try:
        # Acquisition order: wlock before lock, matching Mercurial's
        # usual locking order elsewhere.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers the prompt (or the
            # process is interrupted); the finally clause releases them.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Reporting mode: no lock options were given.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock, so nobody else holds it: release
            # immediately and report it as free below.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # Missing lock file means the lock is free; anything else
                # is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1981 1999
1982 2000
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache only exists on some revlog implementations;
        # abort with a clear message otherwise.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Mutating the cache requires the working-copy lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # Default mode: display cache contents without mutating LRU order.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2054 2072
2055 2073
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mirrors the historical plain-text output.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # Record layout differs per merge-record type; the index
            # meanings below follow the order the records are written in
            # mergestate._state.
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2163 2181
2164 2182
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names get special treatment below so that only *open*
    # branches are offered, matching historical completion behavior.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2187 2205
2188 2206
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Each mode works on the unfiltered repo so the nodemap covers all
    # revisions, including hidden ones.
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the index's native serialization when available
        # (e.g. the Rust index); fall back to the pure-Python writer.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2250 2268
2251 2269
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: remove markers by index and return early.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker obsoleting `precursor` with
        # the given successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent data can only be recorded for changesets we
                    # actually have locally.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2401 2419
2402 2420
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Each entry maps destination -> source; print as "source -> dest".
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2415 2433
2416 2434
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Bug fix: this function was previously (mis)named debugp1copies,
    # silently rebinding the module-level name of the p1 variant defined
    # just above. The registered command name (b'debugp2copies') was
    # unaffected, so behavior is unchanged; the def now matches it.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2429 2447
2430 2448
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Returns (files, dirs) completions for `path`, limited to
        # dirstate entries whose status char is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # On platforms where the separator is not '/', dirstate paths
        # (always '/'-separated) must be compared in '/' form and
        # converted back for display.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate status characters; empty
    # means "no filter" and is replaced by b'nmar' below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2499 2517
2500 2518
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    base = scmutil.revsingle(repo, rev1)
    target = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(base, pats, opts)
    # pathcopies maps destination -> source; print sorted by destination.
    copymap = copies.pathcopies(base, target, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2514 2532
2515 2533
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        is_local = peer.local() is not None
        can_push = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(can_push))
2534 2552
2535 2553
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, silence the tool-selection chatter by
                # buffering it; popbuffer in finally discards it even if
                # _picktool raises.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2623 2641
2624 2642
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for k, v in sorted(pycompat.iteritems(peer.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )
        return

    # Update mode: compare-and-set the key over the wire protocol.
    key, old, new = keyinfo
    args = {
        b'namespace': namespace,
        b'key': key,
        b'old': old,
        b'new': new,
    }
    with peer.commandexecutor() as executor:
        result = executor.callcommand(b'pushkey', args).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2656 2674
2657 2675
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector comparison data for two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation between the two vectors: equal, ancestor,
    # descendant, or unrelated ("|").
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the branches above match, `rel` is unbound
    # and the write below raises NameError — presumably pvec guarantees
    # any two vectors are =, >, <, or | related; confirm against pvec.py.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2684 2702
2685 2703
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but unknown to the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus dirstate-only files that are not pending adds
            # (status b'a') make up the minimal rebuild set.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2733 2751
2734 2752
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All the heavy lifting lives in the repair module.
    repair.rebuildfncache(ui, repo)
2739 2757
2740 2758
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % display)
            continue
        # renamed carries (source path, source node) per filelog.renamed.
        ui.write(
            _(b"%s renamed from %s:%s\n")
            % (display, renamed[0], hex(renamed[1]))
        )
2760 2778
2761 2779
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2767 2785
2768 2786
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump: print a raw per-revision table and return without computing
    # the aggregated statistics below.
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        # running total of raw (uncompressed) sizes seen so far
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            # -1 (nullrev) means no delta parent; report the revision itself
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision supersedes its parents in the running set of heads
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # raw bytes seen so far vs. stored bytes so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # decode the revlog version word: low 16 bits are the index format,
    # the remaining bits are feature flags
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    # update the [min, max, total] triple `l` with a new observed size
    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # single pass over all revisions gathering every counter above
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # no delta parent: the revision starts a new chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies the compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # derive aggregate counters and convert stored totals into averages
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # templates with the width of the largest value baked in for alignment
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    # return (value, percentage-of-total) for the pcfmtstr templates
    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    # printable-ascii chunk markers are shown both as hex and literal
    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            # depth 0 is the full snapshots already reported above
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3123 3141
3124 3142
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # only two table layouts are supported: 0 (legacy) and 1 (adds
    # flags/offset/length/size columns)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hex node ids with --debug, abbreviated ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    # (probe the first entry for the node-column width, if one exists)
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # print the column headers for the selected format/verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not nodes
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3238 3256
3239 3257
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # ordered pipeline of tree transformations; each stage's output feeds
    # the next one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stage trees to print: always, or only when they changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping each stage's tree for later verification
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff them
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style comparison of the two revision lists
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the final tree and print the resulting revs
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3371 3389
3372 3390
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional handle that the wire protocol server logs all I/O to
    logfh = None

    # --logiofd and --logiofile are mutually exclusive sinks
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # runs until the client disconnects; never returns normally
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3421 3439
3422 3440
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions up front; a missing REV2 defaults to null.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; no files are touched.
    with repo.wlock():
        repo.setparents(parent1, parent2)
3440 3458
3441 3459
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir, the lone positional argument is the revision, not a
    # file path; shift it accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # was b'debugdata' (copy-paste from that command); report the
            # actual command name in usage errors
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap higher-level storage objects down to the underlying revlog,
    # which is where sidedata lives
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # print entries sorted by key; keys are shown in octal
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3468 3486
3469 3487
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    # resolve SOURCE: fall back to the repo's 'default' path when omitted
    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled: we only want the
    # peer's cert bytes so win32 can build/repair the chain.
    # NOTE(review): ssl.wrap_socket is deprecated in modern Python —
    # confirm the supported Python range before modernizing to SSLContext.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # binary (DER) form of the peer certificate
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building; only trigger Windows Update if needed
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3539 3557
3540 3558
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect all backup bundles, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    # print up to `limit` changesets from one bundle, honoring the
    # --newest-first and --no-merges log options
    def display(other, chlist, displayer):
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming machinery while probing the bundle
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # apply the first bundle that contains the requested node
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: print the bundle timestamp then its changesets
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # always release resources held by getremotechanges()
            cleanupfn()
3676 3694
3677 3695
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepo state (source and pinned revision) for each path
    # recorded in the changeset's .hgsubstate, in sorted path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3689 3707
3690 3708
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # shared across successorssets() calls so repeated work is memoized
    cache = {}
    closest = opts['closest']
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            # each successor is prefixed with a single space; an empty
            # set (pruned changeset) produces a blank line
            for node in succsset:
                ui.write(b' ')
                ui.write(short(node))
            ui.write(b'\n')
3745 3763
3746 3764
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3756 3774
3757 3775
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    # --rev only makes sense with a repository; resolve the revisions first.
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions into template properties. The key
    # b'ui' is reserved and an empty key is rejected.
    props = {}
    for definition in opts['define']:
        parts = [e.strip() for e in definition.split(b'=', 1)]
        if len(parts) != 2 or not parts[0] or parts[0] == b'ui':
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )
        props[parts[0]] = parts[1]

    if ui.verbose:
        # Show the parse tree and, when aliases change it, the expansion.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the collected properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
3821 3839
3822 3840
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    if response is None:
        # getpass() returned no input; report a placeholder instead.
        response = b"<default response>"
    else:
        response = encoding.strtolocal(response)
    ui.writenoi18n(b'response: %s\n' % response)
3839 3857
3840 3858
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3853 3871
3854 3872
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock before rebuilding.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3860 3878
3861 3879
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all filelog optimisation
    """
    # Thin CLI wrapper: all of the analysis/upgrade logic lives in the
    # upgrade module; remaining keyword options are forwarded unchanged.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )
3911 3929
3912 3930
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Compute each working-directory-relative path exactly once (the old
    # code called repo.pathto() once for the width pass and again when
    # printing), and avoid shadowing the builtin ``abs``.
    normalize = ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
    relpaths = []
    for fn in items:
        rel = repo.pathto(fn)
        if normalize:
            # normpath only swaps separators, so column widths are unchanged.
            rel = util.normpath(rel)
        relpaths.append(rel)
    # Column widths sized to the longest repo path / relative path.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fn) for fn in items),
        max(len(rel) for rel in relpaths),
    )
    for fn, rel in zip(items, relpaths):
        exact = b'exact' if m.exact(fn) else b''
        line = fmt % (fn, rel, exact)
        ui.write(b"%s\n" % line.rstrip())
3939 3957
3940 3958
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # "<hex> (<phase>)" for each divergent node, space separated,
            # with a trailing separator before the reason text.
            parts = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3958 3976
3959 3977
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Strip the shared remote options; only command-specific ones are sent.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Keep only options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    first = repo.debugwireargs(*vals, **args)
    second = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
3987 4005
3988 4006
3989 4007 def _parsewirelangblocks(fh):
3990 4008 activeaction = None
3991 4009 blocklines = []
3992 4010 lastindent = 0
3993 4011
3994 4012 for line in fh:
3995 4013 line = line.rstrip()
3996 4014 if not line:
3997 4015 continue
3998 4016
3999 4017 if line.startswith(b'#'):
4000 4018 continue
4001 4019
4002 4020 if not line.startswith(b' '):
4003 4021 # New block. Flush previous one.
4004 4022 if activeaction:
4005 4023 yield activeaction, blocklines
4006 4024
4007 4025 activeaction = line
4008 4026 blocklines = []
4009 4027 lastindent = 0
4010 4028 continue
4011 4029
4012 4030 # Else we start with an indent.
4013 4031
4014 4032 if not activeaction:
4015 4033 raise error.Abort(_(b'indented line outside of block'))
4016 4034
4017 4035 indent = len(line) - len(line.lstrip())
4018 4036
4019 4037 # If this line is indented more than the last line, concatenate it.
4020 4038 if indent > lastindent and blocklines:
4021 4039 blocklines[-1] += line.lstrip()
4022 4040 else:
4023 4041 blocklines.append(line)
4024 4042 lastindent = indent
4025 4043
4026 4044 # Flush last block.
4027 4045 if activeaction:
4028 4046 yield activeaction, blocklines
4029 4047
4030 4048
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    # ---- Validate option combinations before any connection is made. ----
    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    # Parse the whole mini-language program up front.
    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    # ---- Establish the connection (local SSH server or HTTP path). ----
    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fix: the previous code passed the whole split() list to
                    # open() (a TypeError at runtime) and used a bytes mode
                    # string, unlike the PUSHFILE branch above. Open the
                    # filename element with the same native 'rb' mode.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            # Any explicit frames override a BODYFILE payload.
            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    # ---- Teardown: close the peer and kill the local server, if any. ----
    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,1307 +1,1502
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 46 elapsed time: * seconds (glob)
47 47 heads summary:
48 48 total common heads: 2
49 49 also local heads: 2
50 50 also remote heads: 1
51 51 both: 1
52 52 local heads: 2
53 53 common: 2
54 54 missing: 0
55 55 remote heads: 3
56 56 common: 1
57 57 unknown: 2
58 58 local changesets: 7
59 59 common: 7
60 60 heads: 2
61 61 roots: 1
62 62 missing: 0
63 63 heads: 0
64 64 roots: 0
65 first undecided set: 3
66 heads: 1
67 roots: 1
68 common: 3
69 missing: 0
65 70 common heads: 01241442b3c2 b5714e113bc0
66 71
67 72 % -- a -> b set
68 73 comparing with b
69 74 query 1; heads
70 75 searching for changes
71 76 all local changesets known remotely
72 77 elapsed time: * seconds (glob)
73 78 heads summary:
74 79 total common heads: 2
75 80 also local heads: 2
76 81 also remote heads: 1
77 82 both: 1
78 83 local heads: 2
79 84 common: 2
80 85 missing: 0
81 86 remote heads: 3
82 87 common: 1
83 88 unknown: 2
84 89 local changesets: 7
85 90 common: 7
86 91 heads: 2
87 92 roots: 1
88 93 missing: 0
89 94 heads: 0
90 95 roots: 0
96 first undecided set: 3
97 heads: 1
98 roots: 1
99 common: 3
100 missing: 0
91 101 common heads: 01241442b3c2 b5714e113bc0
92 102
93 103 % -- a -> b set (tip only)
94 104 comparing with b
95 105 query 1; heads
96 106 searching for changes
97 107 all local changesets known remotely
98 108 elapsed time: * seconds (glob)
99 109 heads summary:
100 110 total common heads: 1
101 111 also local heads: 1
102 112 also remote heads: 0
103 113 both: 0
104 114 local heads: 2
105 115 common: 1
106 116 missing: 1
107 117 remote heads: 3
108 118 common: 0
109 119 unknown: 3
110 120 local changesets: 7
111 121 common: 6
112 122 heads: 1
113 123 roots: 1
114 124 missing: 1
115 125 heads: 1
116 126 roots: 1
127 first undecided set: 6
128 heads: 2
129 roots: 1
130 common: 5
131 missing: 1
117 132 common heads: b5714e113bc0
118 133
119 134 % -- b -> a tree
120 135 comparing with a
121 136 searching for changes
122 137 unpruned common: 01241442b3c2 b5714e113bc0
123 138 elapsed time: * seconds (glob)
124 139 heads summary:
125 140 total common heads: 2
126 141 also local heads: 1
127 142 also remote heads: 2
128 143 both: 1
129 144 local heads: 3
130 145 common: 1
131 146 missing: 2
132 147 remote heads: 2
133 148 common: 2
134 149 unknown: 0
135 150 local changesets: 15
136 151 common: 7
137 152 heads: 2
138 153 roots: 1
139 154 missing: 8
140 155 heads: 2
141 156 roots: 2
157 first undecided set: 8
158 heads: 2
159 roots: 2
160 common: 0
161 missing: 8
142 162 common heads: 01241442b3c2 b5714e113bc0
143 163
144 164 % -- b -> a set
145 165 comparing with a
146 166 query 1; heads
147 167 searching for changes
148 168 all remote heads known locally
149 169 elapsed time: * seconds (glob)
150 170 heads summary:
151 171 total common heads: 2
152 172 also local heads: 1
153 173 also remote heads: 2
154 174 both: 1
155 175 local heads: 3
156 176 common: 1
157 177 missing: 2
158 178 remote heads: 2
159 179 common: 2
160 180 unknown: 0
161 181 local changesets: 15
162 182 common: 7
163 183 heads: 2
164 184 roots: 1
165 185 missing: 8
166 186 heads: 2
167 187 roots: 2
188 first undecided set: 8
189 heads: 2
190 roots: 2
191 common: 0
192 missing: 8
168 193 common heads: 01241442b3c2 b5714e113bc0
169 194
170 195 % -- b -> a set (tip only)
171 196 comparing with a
172 197 query 1; heads
173 198 searching for changes
174 199 all remote heads known locally
175 200 elapsed time: * seconds (glob)
176 201 heads summary:
177 202 total common heads: 2
178 203 also local heads: 1
179 204 also remote heads: 2
180 205 both: 1
181 206 local heads: 3
182 207 common: 1
183 208 missing: 2
184 209 remote heads: 2
185 210 common: 2
186 211 unknown: 0
187 212 local changesets: 15
188 213 common: 7
189 214 heads: 2
190 215 roots: 1
191 216 missing: 8
192 217 heads: 2
193 218 roots: 2
219 first undecided set: 8
220 heads: 2
221 roots: 2
222 common: 0
223 missing: 8
194 224 common heads: 01241442b3c2 b5714e113bc0
195 225
196 226
197 227 Many new:
198 228
199 229 $ testdesc '-ra1 -ra2' '-rb' '
200 230 > +2:f +3:a1 +3:b
201 231 > <f +30 :a2'
202 232
203 233 % -- a -> b tree
204 234 comparing with b
205 235 searching for changes
206 236 unpruned common: bebd167eb94d
207 237 elapsed time: * seconds (glob)
208 238 heads summary:
209 239 total common heads: 1
210 240 also local heads: 1
211 241 also remote heads: 0
212 242 both: 0
213 243 local heads: 2
214 244 common: 1
215 245 missing: 1
216 246 remote heads: 1
217 247 common: 0
218 248 unknown: 1
219 249 local changesets: 35
220 250 common: 5
221 251 heads: 1
222 252 roots: 1
223 253 missing: 30
224 254 heads: 1
225 255 roots: 1
256 first undecided set: 34
257 heads: 2
258 roots: 1
259 common: 4
260 missing: 30
226 261 common heads: bebd167eb94d
227 262
228 263 % -- a -> b set
229 264 comparing with b
230 265 query 1; heads
231 266 searching for changes
232 267 taking initial sample
233 268 searching: 2 queries
234 269 query 2; still undecided: 29, sample size is: 29
235 270 2 total queries in *.????s (glob)
236 271 elapsed time: * seconds (glob)
237 272 heads summary:
238 273 total common heads: 1
239 274 also local heads: 1
240 275 also remote heads: 0
241 276 both: 0
242 277 local heads: 2
243 278 common: 1
244 279 missing: 1
245 280 remote heads: 1
246 281 common: 0
247 282 unknown: 1
248 283 local changesets: 35
249 284 common: 5
250 285 heads: 1
251 286 roots: 1
252 287 missing: 30
253 288 heads: 1
254 289 roots: 1
290 first undecided set: 34
291 heads: 2
292 roots: 1
293 common: 4
294 missing: 30
255 295 common heads: bebd167eb94d
256 296
257 297 % -- a -> b set (tip only)
258 298 comparing with b
259 299 query 1; heads
260 300 searching for changes
261 301 taking quick initial sample
262 302 searching: 2 queries
263 303 query 2; still undecided: 31, sample size is: 31
264 304 2 total queries in *.????s (glob)
265 305 elapsed time: * seconds (glob)
266 306 heads summary:
267 307 total common heads: 1
268 308 also local heads: 0
269 309 also remote heads: 0
270 310 both: 0
271 311 local heads: 2
272 312 common: 0
273 313 missing: 2
274 314 remote heads: 1
275 315 common: 0
276 316 unknown: 1
277 317 local changesets: 35
278 318 common: 2
279 319 heads: 1
280 320 roots: 1
281 321 missing: 33
282 322 heads: 2
283 323 roots: 2
324 first undecided set: 35
325 heads: 2
326 roots: 1
327 common: 2
328 missing: 33
284 329 common heads: 66f7d451a68b
285 330
286 331 % -- b -> a tree
287 332 comparing with a
288 333 searching for changes
289 334 unpruned common: 66f7d451a68b bebd167eb94d
290 335 elapsed time: * seconds (glob)
291 336 heads summary:
292 337 total common heads: 1
293 338 also local heads: 0
294 339 also remote heads: 1
295 340 both: 0
296 341 local heads: 1
297 342 common: 0
298 343 missing: 1
299 344 remote heads: 2
300 345 common: 1
301 346 unknown: 1
302 347 local changesets: 8
303 348 common: 5
304 349 heads: 1
305 350 roots: 1
306 351 missing: 3
307 352 heads: 1
308 353 roots: 1
354 first undecided set: 3
355 heads: 1
356 roots: 1
357 common: 0
358 missing: 3
309 359 common heads: bebd167eb94d
310 360
311 361 % -- b -> a set
312 362 comparing with a
313 363 query 1; heads
314 364 searching for changes
315 365 taking initial sample
316 366 searching: 2 queries
317 367 query 2; still undecided: 2, sample size is: 2
318 368 2 total queries in *.????s (glob)
319 369 elapsed time: * seconds (glob)
320 370 heads summary:
321 371 total common heads: 1
322 372 also local heads: 0
323 373 also remote heads: 1
324 374 both: 0
325 375 local heads: 1
326 376 common: 0
327 377 missing: 1
328 378 remote heads: 2
329 379 common: 1
330 380 unknown: 1
331 381 local changesets: 8
332 382 common: 5
333 383 heads: 1
334 384 roots: 1
335 385 missing: 3
336 386 heads: 1
337 387 roots: 1
388 first undecided set: 3
389 heads: 1
390 roots: 1
391 common: 0
392 missing: 3
338 393 common heads: bebd167eb94d
339 394
340 395 % -- b -> a set (tip only)
341 396 comparing with a
342 397 query 1; heads
343 398 searching for changes
344 399 taking initial sample
345 400 searching: 2 queries
346 401 query 2; still undecided: 2, sample size is: 2
347 402 2 total queries in *.????s (glob)
348 403 elapsed time: * seconds (glob)
349 404 heads summary:
350 405 total common heads: 1
351 406 also local heads: 0
352 407 also remote heads: 1
353 408 both: 0
354 409 local heads: 1
355 410 common: 0
356 411 missing: 1
357 412 remote heads: 2
358 413 common: 1
359 414 unknown: 1
360 415 local changesets: 8
361 416 common: 5
362 417 heads: 1
363 418 roots: 1
364 419 missing: 3
365 420 heads: 1
366 421 roots: 1
422 first undecided set: 3
423 heads: 1
424 roots: 1
425 common: 0
426 missing: 3
367 427 common heads: bebd167eb94d
368 428
369 429 Both sides many new with stub:
370 430
371 431 $ testdesc '-ra1 -ra2' '-rb' '
372 432 > +2:f +2:a1 +30 :b
373 433 > <f +30 :a2'
374 434
375 435 % -- a -> b tree
376 436 comparing with b
377 437 searching for changes
378 438 unpruned common: 2dc09a01254d
379 439 elapsed time: * seconds (glob)
380 440 heads summary:
381 441 total common heads: 1
382 442 also local heads: 1
383 443 also remote heads: 0
384 444 both: 0
385 445 local heads: 2
386 446 common: 1
387 447 missing: 1
388 448 remote heads: 1
389 449 common: 0
390 450 unknown: 1
391 451 local changesets: 34
392 452 common: 4
393 453 heads: 1
394 454 roots: 1
395 455 missing: 30
396 456 heads: 1
397 457 roots: 1
458 first undecided set: 33
459 heads: 2
460 roots: 1
461 common: 3
462 missing: 30
398 463 common heads: 2dc09a01254d
399 464
400 465 % -- a -> b set
401 466 comparing with b
402 467 query 1; heads
403 468 searching for changes
404 469 taking initial sample
405 470 searching: 2 queries
406 471 query 2; still undecided: 29, sample size is: 29
407 472 2 total queries in *.????s (glob)
408 473 elapsed time: * seconds (glob)
409 474 heads summary:
410 475 total common heads: 1
411 476 also local heads: 1
412 477 also remote heads: 0
413 478 both: 0
414 479 local heads: 2
415 480 common: 1
416 481 missing: 1
417 482 remote heads: 1
418 483 common: 0
419 484 unknown: 1
420 485 local changesets: 34
421 486 common: 4
422 487 heads: 1
423 488 roots: 1
424 489 missing: 30
425 490 heads: 1
426 491 roots: 1
492 first undecided set: 33
493 heads: 2
494 roots: 1
495 common: 3
496 missing: 30
427 497 common heads: 2dc09a01254d
428 498
429 499 % -- a -> b set (tip only)
430 500 comparing with b
431 501 query 1; heads
432 502 searching for changes
433 503 taking quick initial sample
434 504 searching: 2 queries
435 505 query 2; still undecided: 31, sample size is: 31
436 506 2 total queries in *.????s (glob)
437 507 elapsed time: * seconds (glob)
438 508 heads summary:
439 509 total common heads: 1
440 510 also local heads: 0
441 511 also remote heads: 0
442 512 both: 0
443 513 local heads: 2
444 514 common: 0
445 515 missing: 2
446 516 remote heads: 1
447 517 common: 0
448 518 unknown: 1
449 519 local changesets: 34
450 520 common: 2
451 521 heads: 1
452 522 roots: 1
453 523 missing: 32
454 524 heads: 2
455 525 roots: 2
526 first undecided set: 34
527 heads: 2
528 roots: 1
529 common: 2
530 missing: 32
456 531 common heads: 66f7d451a68b
457 532
458 533 % -- b -> a tree
459 534 comparing with a
460 535 searching for changes
461 536 unpruned common: 2dc09a01254d 66f7d451a68b
462 537 elapsed time: * seconds (glob)
463 538 heads summary:
464 539 total common heads: 1
465 540 also local heads: 0
466 541 also remote heads: 1
467 542 both: 0
468 543 local heads: 1
469 544 common: 0
470 545 missing: 1
471 546 remote heads: 2
472 547 common: 1
473 548 unknown: 1
474 549 local changesets: 34
475 550 common: 4
476 551 heads: 1
477 552 roots: 1
478 553 missing: 30
479 554 heads: 1
480 555 roots: 1
556 first undecided set: 30
557 heads: 1
558 roots: 1
559 common: 0
560 missing: 30
481 561 common heads: 2dc09a01254d
482 562
483 563 % -- b -> a set
484 564 comparing with a
485 565 query 1; heads
486 566 searching for changes
487 567 taking initial sample
488 568 searching: 2 queries
489 569 query 2; still undecided: 29, sample size is: 29
490 570 2 total queries in *.????s (glob)
491 571 elapsed time: * seconds (glob)
492 572 heads summary:
493 573 total common heads: 1
494 574 also local heads: 0
495 575 also remote heads: 1
496 576 both: 0
497 577 local heads: 1
498 578 common: 0
499 579 missing: 1
500 580 remote heads: 2
501 581 common: 1
502 582 unknown: 1
503 583 local changesets: 34
504 584 common: 4
505 585 heads: 1
506 586 roots: 1
507 587 missing: 30
508 588 heads: 1
509 589 roots: 1
590 first undecided set: 30
591 heads: 1
592 roots: 1
593 common: 0
594 missing: 30
510 595 common heads: 2dc09a01254d
511 596
512 597 % -- b -> a set (tip only)
513 598 comparing with a
514 599 query 1; heads
515 600 searching for changes
516 601 taking initial sample
517 602 searching: 2 queries
518 603 query 2; still undecided: 29, sample size is: 29
519 604 2 total queries in *.????s (glob)
520 605 elapsed time: * seconds (glob)
521 606 heads summary:
522 607 total common heads: 1
523 608 also local heads: 0
524 609 also remote heads: 1
525 610 both: 0
526 611 local heads: 1
527 612 common: 0
528 613 missing: 1
529 614 remote heads: 2
530 615 common: 1
531 616 unknown: 1
532 617 local changesets: 34
533 618 common: 4
534 619 heads: 1
535 620 roots: 1
536 621 missing: 30
537 622 heads: 1
538 623 roots: 1
624 first undecided set: 30
625 heads: 1
626 roots: 1
627 common: 0
628 missing: 30
539 629 common heads: 2dc09a01254d
540 630
541 631
542 632 Both many new:
543 633
544 634 $ testdesc '-ra' '-rb' '
545 635 > +2:f +30 :b
546 636 > <f +30 :a'
547 637
548 638 % -- a -> b tree
549 639 comparing with b
550 640 searching for changes
551 641 unpruned common: 66f7d451a68b
552 642 elapsed time: * seconds (glob)
553 643 heads summary:
554 644 total common heads: 1
555 645 also local heads: 0
556 646 also remote heads: 0
557 647 both: 0
558 648 local heads: 1
559 649 common: 0
560 650 missing: 1
561 651 remote heads: 1
562 652 common: 0
563 653 unknown: 1
564 654 local changesets: 32
565 655 common: 2
566 656 heads: 1
567 657 roots: 1
568 658 missing: 30
569 659 heads: 1
570 660 roots: 1
661 first undecided set: 32
662 heads: 1
663 roots: 1
664 common: 2
665 missing: 30
571 666 common heads: 66f7d451a68b
572 667
573 668 % -- a -> b set
574 669 comparing with b
575 670 query 1; heads
576 671 searching for changes
577 672 taking quick initial sample
578 673 searching: 2 queries
579 674 query 2; still undecided: 31, sample size is: 31
580 675 2 total queries in *.????s (glob)
581 676 elapsed time: * seconds (glob)
582 677 heads summary:
583 678 total common heads: 1
584 679 also local heads: 0
585 680 also remote heads: 0
586 681 both: 0
587 682 local heads: 1
588 683 common: 0
589 684 missing: 1
590 685 remote heads: 1
591 686 common: 0
592 687 unknown: 1
593 688 local changesets: 32
594 689 common: 2
595 690 heads: 1
596 691 roots: 1
597 692 missing: 30
598 693 heads: 1
599 694 roots: 1
695 first undecided set: 32
696 heads: 1
697 roots: 1
698 common: 2
699 missing: 30
600 700 common heads: 66f7d451a68b
601 701
602 702 % -- a -> b set (tip only)
603 703 comparing with b
604 704 query 1; heads
605 705 searching for changes
606 706 taking quick initial sample
607 707 searching: 2 queries
608 708 query 2; still undecided: 31, sample size is: 31
609 709 2 total queries in *.????s (glob)
610 710 elapsed time: * seconds (glob)
611 711 heads summary:
612 712 total common heads: 1
613 713 also local heads: 0
614 714 also remote heads: 0
615 715 both: 0
616 716 local heads: 1
617 717 common: 0
618 718 missing: 1
619 719 remote heads: 1
620 720 common: 0
621 721 unknown: 1
622 722 local changesets: 32
623 723 common: 2
624 724 heads: 1
625 725 roots: 1
626 726 missing: 30
627 727 heads: 1
628 728 roots: 1
729 first undecided set: 32
730 heads: 1
731 roots: 1
732 common: 2
733 missing: 30
629 734 common heads: 66f7d451a68b
630 735
631 736 % -- b -> a tree
632 737 comparing with a
633 738 searching for changes
634 739 unpruned common: 66f7d451a68b
635 740 elapsed time: * seconds (glob)
636 741 heads summary:
637 742 total common heads: 1
638 743 also local heads: 0
639 744 also remote heads: 0
640 745 both: 0
641 746 local heads: 1
642 747 common: 0
643 748 missing: 1
644 749 remote heads: 1
645 750 common: 0
646 751 unknown: 1
647 752 local changesets: 32
648 753 common: 2
649 754 heads: 1
650 755 roots: 1
651 756 missing: 30
652 757 heads: 1
653 758 roots: 1
759 first undecided set: 32
760 heads: 1
761 roots: 1
762 common: 2
763 missing: 30
654 764 common heads: 66f7d451a68b
655 765
656 766 % -- b -> a set
657 767 comparing with a
658 768 query 1; heads
659 769 searching for changes
660 770 taking quick initial sample
661 771 searching: 2 queries
662 772 query 2; still undecided: 31, sample size is: 31
663 773 2 total queries in *.????s (glob)
664 774 elapsed time: * seconds (glob)
665 775 heads summary:
666 776 total common heads: 1
667 777 also local heads: 0
668 778 also remote heads: 0
669 779 both: 0
670 780 local heads: 1
671 781 common: 0
672 782 missing: 1
673 783 remote heads: 1
674 784 common: 0
675 785 unknown: 1
676 786 local changesets: 32
677 787 common: 2
678 788 heads: 1
679 789 roots: 1
680 790 missing: 30
681 791 heads: 1
682 792 roots: 1
793 first undecided set: 32
794 heads: 1
795 roots: 1
796 common: 2
797 missing: 30
683 798 common heads: 66f7d451a68b
684 799
685 800 % -- b -> a set (tip only)
686 801 comparing with a
687 802 query 1; heads
688 803 searching for changes
689 804 taking quick initial sample
690 805 searching: 2 queries
691 806 query 2; still undecided: 31, sample size is: 31
692 807 2 total queries in *.????s (glob)
693 808 elapsed time: * seconds (glob)
694 809 heads summary:
695 810 total common heads: 1
696 811 also local heads: 0
697 812 also remote heads: 0
698 813 both: 0
699 814 local heads: 1
700 815 common: 0
701 816 missing: 1
702 817 remote heads: 1
703 818 common: 0
704 819 unknown: 1
705 820 local changesets: 32
706 821 common: 2
707 822 heads: 1
708 823 roots: 1
709 824 missing: 30
710 825 heads: 1
711 826 roots: 1
827 first undecided set: 32
828 heads: 1
829 roots: 1
830 common: 2
831 missing: 30
712 832 common heads: 66f7d451a68b
713 833
714 834
715 835 Both many new skewed:
716 836
717 837 $ testdesc '-ra' '-rb' '
718 838 > +2:f +30 :b
719 839 > <f +50 :a'
720 840
721 841 % -- a -> b tree
722 842 comparing with b
723 843 searching for changes
724 844 unpruned common: 66f7d451a68b
725 845 elapsed time: * seconds (glob)
726 846 heads summary:
727 847 total common heads: 1
728 848 also local heads: 0
729 849 also remote heads: 0
730 850 both: 0
731 851 local heads: 1
732 852 common: 0
733 853 missing: 1
734 854 remote heads: 1
735 855 common: 0
736 856 unknown: 1
737 857 local changesets: 52
738 858 common: 2
739 859 heads: 1
740 860 roots: 1
741 861 missing: 50
742 862 heads: 1
743 863 roots: 1
864 first undecided set: 52
865 heads: 1
866 roots: 1
867 common: 2
868 missing: 50
744 869 common heads: 66f7d451a68b
745 870
746 871 % -- a -> b set
747 872 comparing with b
748 873 query 1; heads
749 874 searching for changes
750 875 taking quick initial sample
751 876 searching: 2 queries
752 877 query 2; still undecided: 51, sample size is: 51
753 878 2 total queries in *.????s (glob)
754 879 elapsed time: * seconds (glob)
755 880 heads summary:
756 881 total common heads: 1
757 882 also local heads: 0
758 883 also remote heads: 0
759 884 both: 0
760 885 local heads: 1
761 886 common: 0
762 887 missing: 1
763 888 remote heads: 1
764 889 common: 0
765 890 unknown: 1
766 891 local changesets: 52
767 892 common: 2
768 893 heads: 1
769 894 roots: 1
770 895 missing: 50
771 896 heads: 1
772 897 roots: 1
898 first undecided set: 52
899 heads: 1
900 roots: 1
901 common: 2
902 missing: 50
773 903 common heads: 66f7d451a68b
774 904
775 905 % -- a -> b set (tip only)
776 906 comparing with b
777 907 query 1; heads
778 908 searching for changes
779 909 taking quick initial sample
780 910 searching: 2 queries
781 911 query 2; still undecided: 51, sample size is: 51
782 912 2 total queries in *.????s (glob)
783 913 elapsed time: * seconds (glob)
784 914 heads summary:
785 915 total common heads: 1
786 916 also local heads: 0
787 917 also remote heads: 0
788 918 both: 0
789 919 local heads: 1
790 920 common: 0
791 921 missing: 1
792 922 remote heads: 1
793 923 common: 0
794 924 unknown: 1
795 925 local changesets: 52
796 926 common: 2
797 927 heads: 1
798 928 roots: 1
799 929 missing: 50
800 930 heads: 1
801 931 roots: 1
932 first undecided set: 52
933 heads: 1
934 roots: 1
935 common: 2
936 missing: 50
802 937 common heads: 66f7d451a68b
803 938
804 939 % -- b -> a tree
805 940 comparing with a
806 941 searching for changes
807 942 unpruned common: 66f7d451a68b
808 943 elapsed time: * seconds (glob)
809 944 heads summary:
810 945 total common heads: 1
811 946 also local heads: 0
812 947 also remote heads: 0
813 948 both: 0
814 949 local heads: 1
815 950 common: 0
816 951 missing: 1
817 952 remote heads: 1
818 953 common: 0
819 954 unknown: 1
820 955 local changesets: 32
821 956 common: 2
822 957 heads: 1
823 958 roots: 1
824 959 missing: 30
825 960 heads: 1
826 961 roots: 1
962 first undecided set: 32
963 heads: 1
964 roots: 1
965 common: 2
966 missing: 30
827 967 common heads: 66f7d451a68b
828 968
829 969 % -- b -> a set
830 970 comparing with a
831 971 query 1; heads
832 972 searching for changes
833 973 taking quick initial sample
834 974 searching: 2 queries
835 975 query 2; still undecided: 31, sample size is: 31
836 976 2 total queries in *.????s (glob)
837 977 elapsed time: * seconds (glob)
838 978 heads summary:
839 979 total common heads: 1
840 980 also local heads: 0
841 981 also remote heads: 0
842 982 both: 0
843 983 local heads: 1
844 984 common: 0
845 985 missing: 1
846 986 remote heads: 1
847 987 common: 0
848 988 unknown: 1
849 989 local changesets: 32
850 990 common: 2
851 991 heads: 1
852 992 roots: 1
853 993 missing: 30
854 994 heads: 1
855 995 roots: 1
996 first undecided set: 32
997 heads: 1
998 roots: 1
999 common: 2
1000 missing: 30
856 1001 common heads: 66f7d451a68b
857 1002
858 1003 % -- b -> a set (tip only)
859 1004 comparing with a
860 1005 query 1; heads
861 1006 searching for changes
862 1007 taking quick initial sample
863 1008 searching: 2 queries
864 1009 query 2; still undecided: 31, sample size is: 31
865 1010 2 total queries in *.????s (glob)
866 1011 elapsed time: * seconds (glob)
867 1012 heads summary:
868 1013 total common heads: 1
869 1014 also local heads: 0
870 1015 also remote heads: 0
871 1016 both: 0
872 1017 local heads: 1
873 1018 common: 0
874 1019 missing: 1
875 1020 remote heads: 1
876 1021 common: 0
877 1022 unknown: 1
878 1023 local changesets: 32
879 1024 common: 2
880 1025 heads: 1
881 1026 roots: 1
882 1027 missing: 30
883 1028 heads: 1
884 1029 roots: 1
1030 first undecided set: 32
1031 heads: 1
1032 roots: 1
1033 common: 2
1034 missing: 30
885 1035 common heads: 66f7d451a68b
886 1036
887 1037
888 1038 Both many new on top of long history:
889 1039
890 1040 $ testdesc '-ra' '-rb' '
891 1041 > +1000:f +30 :b
892 1042 > <f +50 :a'
893 1043
894 1044 % -- a -> b tree
895 1045 comparing with b
896 1046 searching for changes
897 1047 unpruned common: 7ead0cba2838
898 1048 elapsed time: * seconds (glob)
899 1049 heads summary:
900 1050 total common heads: 1
901 1051 also local heads: 0
902 1052 also remote heads: 0
903 1053 both: 0
904 1054 local heads: 1
905 1055 common: 0
906 1056 missing: 1
907 1057 remote heads: 1
908 1058 common: 0
909 1059 unknown: 1
910 1060 local changesets: 1050
911 1061 common: 1000
912 1062 heads: 1
913 1063 roots: 1
914 1064 missing: 50
915 1065 heads: 1
916 1066 roots: 1
1067 first undecided set: 1050
1068 heads: 1
1069 roots: 1
1070 common: 1000
1071 missing: 50
917 1072 common heads: 7ead0cba2838
918 1073
919 1074 % -- a -> b set
920 1075 comparing with b
921 1076 query 1; heads
922 1077 searching for changes
923 1078 taking quick initial sample
924 1079 searching: 2 queries
925 1080 query 2; still undecided: 1049, sample size is: 11
926 1081 sampling from both directions
927 1082 searching: 3 queries
928 1083 query 3; still undecided: 31, sample size is: 31
929 1084 3 total queries in *.????s (glob)
930 1085 elapsed time: * seconds (glob)
931 1086 heads summary:
932 1087 total common heads: 1
933 1088 also local heads: 0
934 1089 also remote heads: 0
935 1090 both: 0
936 1091 local heads: 1
937 1092 common: 0
938 1093 missing: 1
939 1094 remote heads: 1
940 1095 common: 0
941 1096 unknown: 1
942 1097 local changesets: 1050
943 1098 common: 1000
944 1099 heads: 1
945 1100 roots: 1
946 1101 missing: 50
947 1102 heads: 1
948 1103 roots: 1
1104 first undecided set: 1050
1105 heads: 1
1106 roots: 1
1107 common: 1000
1108 missing: 50
949 1109 common heads: 7ead0cba2838
950 1110
951 1111 % -- a -> b set (tip only)
952 1112 comparing with b
953 1113 query 1; heads
954 1114 searching for changes
955 1115 taking quick initial sample
956 1116 searching: 2 queries
957 1117 query 2; still undecided: 1049, sample size is: 11
958 1118 sampling from both directions
959 1119 searching: 3 queries
960 1120 query 3; still undecided: 31, sample size is: 31
961 1121 3 total queries in *.????s (glob)
962 1122 elapsed time: * seconds (glob)
963 1123 heads summary:
964 1124 total common heads: 1
965 1125 also local heads: 0
966 1126 also remote heads: 0
967 1127 both: 0
968 1128 local heads: 1
969 1129 common: 0
970 1130 missing: 1
971 1131 remote heads: 1
972 1132 common: 0
973 1133 unknown: 1
974 1134 local changesets: 1050
975 1135 common: 1000
976 1136 heads: 1
977 1137 roots: 1
978 1138 missing: 50
979 1139 heads: 1
980 1140 roots: 1
1141 first undecided set: 1050
1142 heads: 1
1143 roots: 1
1144 common: 1000
1145 missing: 50
981 1146 common heads: 7ead0cba2838
982 1147
983 1148 % -- b -> a tree
984 1149 comparing with a
985 1150 searching for changes
986 1151 unpruned common: 7ead0cba2838
987 1152 elapsed time: * seconds (glob)
988 1153 heads summary:
989 1154 total common heads: 1
990 1155 also local heads: 0
991 1156 also remote heads: 0
992 1157 both: 0
993 1158 local heads: 1
994 1159 common: 0
995 1160 missing: 1
996 1161 remote heads: 1
997 1162 common: 0
998 1163 unknown: 1
999 1164 local changesets: 1030
1000 1165 common: 1000
1001 1166 heads: 1
1002 1167 roots: 1
1003 1168 missing: 30
1004 1169 heads: 1
1005 1170 roots: 1
1171 first undecided set: 1030
1172 heads: 1
1173 roots: 1
1174 common: 1000
1175 missing: 30
1006 1176 common heads: 7ead0cba2838
1007 1177
1008 1178 % -- b -> a set
1009 1179 comparing with a
1010 1180 query 1; heads
1011 1181 searching for changes
1012 1182 taking quick initial sample
1013 1183 searching: 2 queries
1014 1184 query 2; still undecided: 1029, sample size is: 11
1015 1185 sampling from both directions
1016 1186 searching: 3 queries
1017 1187 query 3; still undecided: 15, sample size is: 15
1018 1188 3 total queries in *.????s (glob)
1019 1189 elapsed time: * seconds (glob)
1020 1190 heads summary:
1021 1191 total common heads: 1
1022 1192 also local heads: 0
1023 1193 also remote heads: 0
1024 1194 both: 0
1025 1195 local heads: 1
1026 1196 common: 0
1027 1197 missing: 1
1028 1198 remote heads: 1
1029 1199 common: 0
1030 1200 unknown: 1
1031 1201 local changesets: 1030
1032 1202 common: 1000
1033 1203 heads: 1
1034 1204 roots: 1
1035 1205 missing: 30
1036 1206 heads: 1
1037 1207 roots: 1
1208 first undecided set: 1030
1209 heads: 1
1210 roots: 1
1211 common: 1000
1212 missing: 30
1038 1213 common heads: 7ead0cba2838
1039 1214
1040 1215 % -- b -> a set (tip only)
1041 1216 comparing with a
1042 1217 query 1; heads
1043 1218 searching for changes
1044 1219 taking quick initial sample
1045 1220 searching: 2 queries
1046 1221 query 2; still undecided: 1029, sample size is: 11
1047 1222 sampling from both directions
1048 1223 searching: 3 queries
1049 1224 query 3; still undecided: 15, sample size is: 15
1050 1225 3 total queries in *.????s (glob)
1051 1226 elapsed time: * seconds (glob)
1052 1227 heads summary:
1053 1228 total common heads: 1
1054 1229 also local heads: 0
1055 1230 also remote heads: 0
1056 1231 both: 0
1057 1232 local heads: 1
1058 1233 common: 0
1059 1234 missing: 1
1060 1235 remote heads: 1
1061 1236 common: 0
1062 1237 unknown: 1
1063 1238 local changesets: 1030
1064 1239 common: 1000
1065 1240 heads: 1
1066 1241 roots: 1
1067 1242 missing: 30
1068 1243 heads: 1
1069 1244 roots: 1
1245 first undecided set: 1030
1246 heads: 1
1247 roots: 1
1248 common: 1000
1249 missing: 30
1070 1250 common heads: 7ead0cba2838
1071 1251
1072 1252
1073 1253 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1074 1254
1075 1255 $ hg init manyheads
1076 1256 $ cd manyheads
1077 1257 $ echo "+300:r @a" >dagdesc
1078 1258 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1079 1259 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1080 1260 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1081 1261 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1082 1262 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1083 1263 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1084 1264 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1085 1265 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1086 1266 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1087 1267 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1088 1268 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1089 1269 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1090 1270 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1091 1271 $ echo "@b *r+3" >>dagdesc # one more head
1092 1272 $ hg debugbuilddag <dagdesc
1093 1273 reading DAG from stdin
1094 1274
1095 1275 $ hg heads -t --template . | wc -c
1096 1276 \s*261 (re)
1097 1277
1098 1278 $ hg clone -b a . a
1099 1279 adding changesets
1100 1280 adding manifests
1101 1281 adding file changes
1102 1282 added 1340 changesets with 0 changes to 0 files (+259 heads)
1103 1283 new changesets 1ea73414a91b:1c51e2c80832
1104 1284 updating to branch a
1105 1285 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1106 1286 $ hg clone -b b . b
1107 1287 adding changesets
1108 1288 adding manifests
1109 1289 adding file changes
1110 1290 added 304 changesets with 0 changes to 0 files
1111 1291 new changesets 1ea73414a91b:513314ca8b3a
1112 1292 updating to branch b
1113 1293 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1114 1294
1115 1295 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false
1116 1296 comparing with b
1117 1297 query 1; heads
1118 1298 searching for changes
1119 1299 taking quick initial sample
1120 1300 searching: 2 queries
1121 1301 query 2; still undecided: 1080, sample size is: 100
1122 1302 sampling from both directions
1123 1303 searching: 3 queries
1124 1304 query 3; still undecided: 980, sample size is: 200
1125 1305 sampling from both directions
1126 1306 searching: 4 queries
1127 1307 query 4; still undecided: 497, sample size is: 210
1128 1308 sampling from both directions
1129 1309 searching: 5 queries
1130 1310 query 5; still undecided: 285, sample size is: 220
1131 1311 sampling from both directions
1132 1312 searching: 6 queries
1133 1313 query 6; still undecided: 63, sample size is: 63
1134 1314 6 total queries in *.????s (glob)
1135 1315 elapsed time: * seconds (glob)
1136 1316 heads summary:
1137 1317 total common heads: 1
1138 1318 also local heads: 0
1139 1319 also remote heads: 0
1140 1320 both: 0
1141 1321 local heads: 260
1142 1322 common: 0
1143 1323 missing: 260
1144 1324 remote heads: 1
1145 1325 common: 0
1146 1326 unknown: 1
1147 1327 local changesets: 1340
1148 1328 common: 300
1149 1329 heads: 1
1150 1330 roots: 1
1151 1331 missing: 1040
1152 1332 heads: 260
1153 1333 roots: 260
1334 first undecided set: 1340
1335 heads: 260
1336 roots: 1
1337 common: 300
1338 missing: 1040
1154 1339 common heads: 3ee37d65064a
1155 1340 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1156 1341 comparing with b
1157 1342 query 1; heads
1158 1343 searching for changes
1159 1344 taking quick initial sample
1160 1345 searching: 2 queries
1161 1346 query 2; still undecided: 303, sample size is: 9
1162 1347 sampling from both directions
1163 1348 searching: 3 queries
1164 1349 query 3; still undecided: 3, sample size is: 3
1165 1350 3 total queries in *.????s (glob)
1166 1351 elapsed time: * seconds (glob)
1167 1352 heads summary:
1168 1353 total common heads: 1
1169 1354 also local heads: 0
1170 1355 also remote heads: 0
1171 1356 both: 0
1172 1357 local heads: 260
1173 1358 common: 0
1174 1359 missing: 260
1175 1360 remote heads: 1
1176 1361 common: 0
1177 1362 unknown: 1
1178 1363 local changesets: 1340
1179 1364 common: 300
1180 1365 heads: 1
1181 1366 roots: 1
1182 1367 missing: 1040
1183 1368 heads: 260
1184 1369 roots: 260
1370 first undecided set: 1340
1371 heads: 260
1372 roots: 1
1373 common: 300
1374 missing: 1040
1185 1375 common heads: 3ee37d65064a
1186 1376
1187 1377 Test actual protocol when pulling one new head in addition to common heads
1188 1378
1189 1379 $ hg clone -U b c
1190 1380 $ hg -R c id -ir tip
1191 1381 513314ca8b3a
1192 1382 $ hg -R c up -qr default
1193 1383 $ touch c/f
1194 1384 $ hg -R c ci -Aqm "extra head"
1195 1385 $ hg -R c id -i
1196 1386 e64a39e7da8b
1197 1387
1198 1388 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1199 1389 $ cat hg.pid >> $DAEMON_PIDS
1200 1390
1201 1391 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1202 1392 comparing with http://localhost:$HGPORT/
1203 1393 searching for changes
1204 1394 e64a39e7da8b
1205 1395
1206 1396 $ killdaemons.py
1207 1397 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1208 1398 "GET /?cmd=capabilities HTTP/1.1" 200 -
1209 1399 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1210 1400 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1211 1401 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1212 1402 $ cat errors.log
1213 1403
1214 1404 $ cd ..
1215 1405
1216 1406
1217 1407 Issue 4438 - test coverage for 3ef893520a85 issues.
1218 1408
1219 1409 $ mkdir issue4438
1220 1410 $ cd issue4438
1221 1411 #if false
1222 1412 generate new bundles:
1223 1413 $ hg init r1
1224 1414 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1225 1415 $ hg clone -q r1 r2
1226 1416 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1227 1417 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1228 1418 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1229 1419 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1230 1420 #else
1231 1421 use existing bundles:
1232 1422 $ hg init r1
1233 1423 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1234 1424 $ hg -R r1 -q up
1235 1425 $ hg init r2
1236 1426 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1237 1427 $ hg -R r2 -q up
1238 1428 #endif
1239 1429
1240 1430 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1241 1431
1242 1432 $ hg -R r1 outgoing r2 -T'{rev} '
1243 1433 comparing with r2
1244 1434 searching for changes
1245 1435 101 102 103 104 105 106 107 108 109 110 (no-eol)
1246 1436
1247 1437 The case where all the 'initialsamplesize' samples already were common would
1248 1438 give 'all remote heads known locally' without checking the remaining heads -
1249 1439 fixed in 86c35b7ae300:
1250 1440
1251 1441 $ cat >> r1/.hg/hgrc << EOF
1252 1442 > [devel]
1253 1443 > discovery.randomize = False
1254 1444 > EOF
1255 1445
1256 1446 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1257 1447 > --config blackbox.track='command commandfinish discovery'
1258 1448 comparing with r2
1259 1449 searching for changes
1260 1450 101 102 103 104 105 106 107 108 109 110 (no-eol)
1261 1451 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1262 1452 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1263 1453 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1264 1454 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1265 1455 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1266 1456 $ cd ..
1267 1457
1268 1458 Even if the set of revs to discover is restricted, unrelated revs may be
1269 1459 returned as common heads.
1270 1460
1271 1461 $ mkdir ancestorsof
1272 1462 $ cd ancestorsof
1273 1463 $ hg init a
1274 1464 $ hg clone a b -q
1275 1465 $ cd b
1276 1466 $ hg debugbuilddag '.:root *root *root'
1277 1467 $ hg log -G -T '{node|short}'
1278 1468 o fa942426a6fd
1279 1469 |
1280 1470 | o 66f7d451a68b
1281 1471 |/
1282 1472 o 1ea73414a91b
1283 1473
1284 1474 $ hg push -r 66f7d451a68b -q
1285 1475 $ hg debugdiscovery --verbose --rev fa942426a6fd
1286 1476 comparing with $TESTTMP/ancestorsof/a
1287 1477 searching for changes
1288 1478 elapsed time: * seconds (glob)
1289 1479 heads summary:
1290 1480 total common heads: 1
1291 1481 also local heads: 1
1292 1482 also remote heads: 1
1293 1483 both: 1
1294 1484 local heads: 2
1295 1485 common: 1
1296 1486 missing: 1
1297 1487 remote heads: 1
1298 1488 common: 1
1299 1489 unknown: 0
1300 1490 local changesets: 3
1301 1491 common: 2
1302 1492 heads: 1
1303 1493 roots: 1
1304 1494 missing: 1
1305 1495 heads: 1
1306 1496 roots: 1
1497 first undecided set: 1
1498 heads: 1
1499 roots: 1
1500 common: 0
1501 missing: 1
1307 1502 common heads: 66f7d451a68b
General Comments 0
You need to be logged in to leave comments. Login now