##// END OF EJS Templates
debugdiscovery: display the number of roundtrip used...
marmoute -
r46726:d90f439f default
parent child Browse files
Show More
@@ -1,4652 +1,4653 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 revlog,
73 73 revset,
74 74 revsetlang,
75 75 scmutil,
76 76 setdiscovery,
77 77 simplemerge,
78 78 sshpeer,
79 79 sslutil,
80 80 streamclone,
81 81 strip,
82 82 tags as tagsmod,
83 83 templater,
84 84 treediscovery,
85 85 upgrade,
86 86 url as urlmod,
87 87 util,
88 88 vfs as vfsmod,
89 89 wireprotoframing,
90 90 wireprotoserver,
91 91 wireprotov2peer,
92 92 )
93 93 from .utils import (
94 94 cborutil,
95 95 compression,
96 96 dateutil,
97 97 procutil,
98 98 stringutil,
99 99 )
100 100
101 101 from .revlogutils import (
102 102 deltas as deltautil,
103 103 nodemap,
104 104 sidedata,
105 105 )
106 106
# Convenience alias so callers can release locks without importing lockmod.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the commands
# registered by the strip module so both share one registration point; the
# @command decorator below adds every command defined in this file to it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
112 112
113 113
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        tonode = rlog.lookup
    elif len(args) == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        tonode = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = rlog.ancestor(tonode(rev1), tonode(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancnode), hex(ancnode)))
133 133
134 134
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial's vfs layer takes byte-string paths (every other vfs call in
    # this file uses b'' literals); the original passed a native str here,
    # which is wrong on Python 3.  Use one named constant so the create and
    # unlink paths cannot drift apart.
    eicar_path = b'eicar-test-file.com'
    with repo.cachevfs.open(eicar_path, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(eicar_path))
150 150
151 151
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle (local path or URL), parse its header, and apply it.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
158 158
159 159
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command can only seed a brand-new repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first pass over the parsed DAG, used
    # only to size the progress bar and the initial merged file)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create the commits inside a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node (-1 = none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # maps DAG rev id -> committed node, for parent lookups
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this rev's line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry over the second parent's nf* files so the
                        # merge does not delete them
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: return content for files we created
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
335 335
336 336
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup 'gen'.

    With 'all' set, every delta chunk of the changelog, manifest, and each
    filelog is listed with its parents, base, and size; otherwise only the
    changelog node hashes are printed.  'indent' prefixes each output line
    (used when called for a part nested inside a bundle2).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Consume and display the delta chunks of the current section.
            # NOTE: relies on 'gen' being a stream -- each call continues
            # where the previous section left off.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # The section order below mirrors the changegroup wire format:
        # changelog, then manifests, then one section per filelog.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
376 376
377 377
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # marker format newer than this client understands
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(blob))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(blob)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, marker)
        fm.end()
400 400
401 401
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
410 410
411 411
def _quasirepr(thing):
    """repr()-like rendering, but with deterministic (sorted) dict order."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
418 418
419 419
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # optional filtering by part type
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # part types are mutually exclusive, so an elif chain is equivalent
        if part.type == b'changegroup':
            cgversion = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(cgversion, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
442 442
443 443
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, do not parse the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
466 466
467 467
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    # bundle2 capabilities are nested one level deeper
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
484 484
485 485
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    # nothing to report if no files sidedata was stored for this revision
    if sd.get(sidedata.SD_FILES) is None:
        return
    files = metadata.decode_files_sidedata(sd)
    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # copy information, if any
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
518 518
519 519
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    problems = 0
    # pass 1: every tracked file must appear in the expected manifest(s)
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in b"nr" and fname not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (fname, st))
            problems += 1
        if st in b"a" and fname in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (fname, st))
            problems += 1
        if st in b"m" and fname not in m1 and fname not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, st)
            )
            problems += 1
    # pass 2: every file in manifest1 must be tracked with a sane state
    for fname in m1:
        st = repo.dirstate[fname]
        if st not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (fname, st))
            problems += 1
    if problems:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
548 548
549 549
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # dispatch to the style or color listing helper
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
562 562
563 563
def _debugdisplaycolor(ui):
    # work on a copy so we do not clobber the caller's style table
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    sortkey = lambda item: (b'_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
580 580
581 581
def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest one so the effects column lines up
    colwidth = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, colwidth - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
595 595
596 596
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
618 618
619 619
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone index file: emit its DAG, labeling any listed revs.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yields ('n', (rev, parents)) nodes and ('l', (rev, label))
            # labels in the format dagparser.dagtextlines expects
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No index file: emit the repository's changelog DAG, optionally
        # annotated with tags and branch switches.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the named
                    # branch changes between consecutive revisions
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
689 689
690 690
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        # a storage flag was given: the first positional arg is the rev
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
706 706
707 707
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (unixtime, tz-offset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
726 726
727 727
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    # hoist frequently-used revlog accessors out of the per-rev loop
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather per-revision delta statistics from the revlog index entry.
        # Index tuple fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base can be any rev; classify it
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, the base is always the previous rev
            # (or the rev itself for a full snapshot)
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # assign a small sequential id to each distinct chain base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # on-disk distance from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the whole chain to measure how much
            # disk data would actually be touched
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
908 908
909 909
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        # honor the deprecated --nodates flag when explicitly given
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        # sort by mtime, then by filename
        sortkey = lambda item: (item[1][3], item[0])
    else:
        sortkey = None  # default ordering: by filename
    for path, ent in sorted(pycompat.iteritems(repo.dirstate), key=sortkey):
        # ent fields: (state, mode, size, mtime)
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953 953
954 954
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # `data` is handed to the discovery implementations as an audit
    # dictionary (they record e.g. b'total-roundtrips' into it) and is then
    # extended with the statistics computed below; it backs every
    # "%(key)d" in the output section.
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # (renamed from `any` to avoid shadowing the builtin)
            common, hasincoming, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    # fix: these two assignments were swapped — the "heads" entry was fed
    # the roots count and vice versa, so the summary lines below lied.
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b" also local heads: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b" also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b" common: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b" missing: %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b" common: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b" unknown: %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
1112 1113
1113 1114
1114 1115 _chunksize = 4 << 10
1115 1116
1116 1117
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's url handling (so proxies,
    auth and schemes configured for the repo apply) and streamed in
    _chunksize pieces either to the ui or, with --output, to a file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()
        # fix: the response object was previously never closed, leaking the
        # underlying connection/file descriptor on every invocation.
        fh.close()
1139 1140
1140 1141
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized (PyOxidizer) builds have no
        # per-module __file__, so fall back to the executable path there
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # plain name per line in quiet/verbose mode; otherwise append a
        # compatibility note on the same line
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1202 1203
1203 1204
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the expression is pushed through these stages in order; --show-stage
    # selects which intermediate trees get dumped
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against:
    # every file touched in history with --all-files, plus the working
    # directory (including unknown/ignored files) when relevant
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1299 1300
1300 1301
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad every variant name to a common column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # fix: the attribute name must be a native str — getattr() raises
            # TypeError for bytes names on Python 3, which crashed this path.
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # highlight disagreements between repo, config and built-in default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1371 1372
1372 1373
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result exactly as this command always has
        return flag and b'yes' or b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; the probe is skipped
    # (reported as unknown) when the path is not writable
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1395 1396
1396 1397
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the getbundle() keyword arguments from the hex node ids
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name to the on-the-wire bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1443 1444
1444 1445
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then walk up its parent
                # directories: a file is also ignored when any containing
                # directory matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1493 1494
1494 1495
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # probe the rendered id width from the first revision, if there is one
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1534 1535
1535 1536
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # one edge per parent; the second parent is omitted when null
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1554 1555
1555 1556
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # fix: the attribute name must be a native str — getattr() raises
    # TypeError for bytes names on Python 3, so the b'stats' probe crashed
    # instead of falling through to the intended Abort below.
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1565 1566
1566 1567
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of hard failures found; also the command's exit status
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # oxidized (PyOxidizer) builds have no per-module __file__; fall back
    # to the executable path there
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # exercise the compiled extensions the active module policy requires,
    # importing them so a broken install is reported here
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # the implicit 'vi' default is only a warning; an explicitly configured
    # but missing editor is counted as a problem below
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # extensions may register their own 'debuginstall' handler to add checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1865 1866
1866 1867
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # render one '1'/'0' character per queried node
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
1880 1881
1881 1882
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; the real implementation is debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1886 1887
1887 1888
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced removal: unconditionally unlink the lock file(s) and stop.
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly, hold them
    # until the user acknowledges, then release in the finally clause.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode: show who holds each lock, or that it is free.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We actually got the lock, so nobody else holds it.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquisition and the stat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1999 2000
2000 2001
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of revlog-backed
        # manifest storage; other backends may not expose one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # No action requested: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2072 2073
2073 2074
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: mirrors the historical plain-text output of
        # this command.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields depend on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras for files that carry metadata but have no merge record.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2181 2182
2182 2183
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect names from every namespace except branches; branches are
    # handled separately below so that only open ones are offered.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No arguments means "complete the empty prefix", i.e. everything.
    prefixes = args or [b'']
    matches = {
        name
        for name in candidates
        if any(name.startswith(prefix) for prefix in prefixes)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2205 2206
2206 2207
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # NOTE(review): `nodemap` is referenced below but not imported in this
    # chunk; presumably it comes from a file-level import (revlogutils) —
    # confirm.
    if opts['dump_new']:
        # Serialize a fresh persistent nodemap from the in-memory index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the raw bytes currently persisted on disk, if any.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted data against the live index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Show the docket (metadata header) describing the on-disk data.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2268 2269
2269 2270
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id without requiring it to exist
        # in the local repository.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: remove the markers at the given obsstore indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors inside
        # a transaction; manual lock/tr handling preserves the original
        # close/release ordering.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2419 2420
2420 2421
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p1.
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2433 2434
2434 2435
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Bug fix: this function was previously also named `debugp1copies`,
    # which silently shadowed the real debugp1copies at module level.
    # Command dispatch only uses the registered command name, so renaming
    # the function is safe for all callers.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2447 2448
2448 2449
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completing `path` against dirstate entries
        # whose state byte is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; translate the OS separator so
        # prefix matching works on all platforms.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Truncate at the next separator unless --full was given.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter flags;
    # no flags means "all of n/m/a/r".
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2517 2518
2518 2519
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then emit one "source -> destination" line
    # per copy detected between them, sorted by destination.
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    pathmap = copies.pathcopies(fromctx, toctx, matcher)
    for dst, src in sorted(pathmap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2532 2533
2533 2534
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here; it only becomes
    # visible when the user also passes --debug.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(_(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no')))
2552 2553
2553 2554
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, swallow _picktool's chatter so only the
                # "FILE = MERGETOOL" lines reach the user.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2641 2642
2642 2643
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )
        return
    # Update mode: compare-and-swap the key from old to new.
    key, old, new = keyinfo
    with target.commandexecutor() as e:
        r = e.callcommand(
            b'pushkey',
            {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            },
        ).result()

    ui.status(pycompat.bytestr(r) + b'\n')
    return not r
2674 2675
2675 2676
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvec) of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Bug fix: previously `rel` was left unbound (NameError below)
        # when none of the comparisons matched; report "unknown" instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2702 2703
2703 2704
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None tells rebuild() to reset every file.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # Keep 'added' entries untouched; only reset files that are
            # tracked on one side but not the other.
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2751 2752
2752 2753
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual rebuild logic lives in repair.rebuildfncache.
    repair.rebuildfncache(ui, repo)
2757 2758
2758 2759
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (origpath, orignode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            origpath, orignode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, origpath, hex(orignode))
            )
2778 2779
2779 2780
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2785 2786
2786 2787
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one line of raw index data per revision instead
    of the aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one row of raw index data per revision and exit.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": treat the rev as its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the running set of heads: drop parents, add this rev
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # decode the revlog version field: low 16 bits are the format number,
    # the high bits carry feature flags
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each 3-element list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l` in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # no delta parent: stored as a full snapshot (or empty text)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta revision: extend the base's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # classify the delta base: prev, p1, p2 or other
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the running totals into averages for display
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # helpers building printf-style templates sized to the widest value
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the templates above
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-friendly label for a one-byte chunk compression marker
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
        ui.writenoi18n(
            b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
        )
3141 3142
3142 3143
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full node hashes with --debug, abbreviated ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of the first node id determines the column width
        idlen = len(shortfn(r.node(i)))
        break

    # print the column header matching the chosen format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not node ids
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3256 3257
3257 3258
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # processing pipeline: each stage transforms the tree produced by
    # the previous one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stage trees to print: always, or only when they changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff them
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # show the mismatch as a unified-diff-like listing of revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3389 3390
3390 3391
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive ways to capture I/O
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # hand control to the SSH wire-protocol server; serve_forever() blocks
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3439 3440
3440 3441
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions up front; an omitted second parent defaults
    # to the null revision.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(parent1, parent2)
3458 3459
3459 3460
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the sole positional argument is the revision,
    # not a file path, so shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # reach through any wrapper to the underlying revlog when present
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3486 3487
3487 3488
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # fall back to the repository's default path
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # only schemes with a well-known default port are supported
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Verification is deliberately disabled here: we only need the peer's
    # raw certificate bytes so Windows can inspect/build the chain.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building; if incomplete, retry with build=True
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3557 3558
3558 3559
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect strip-backup bundles, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show the changesets in `chlist`, honoring --newest-first,
        # --no-merges and the log limit
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # nothing to do if the requested changeset is already present
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming machinery while probing the bundle
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: apply the first bundle containing the node
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: header with the backup's mtime, then the
                # changesets it contains
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3694 3695
3695 3696
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the requested revision and print each subrepository entry
    # in deterministic (sorted-by-path) order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, entry in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % entry[0])
        ui.writenoi18n(b' revision %s\n' % entry[1])
3707 3708
3708 3709
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # shared across successorssets() calls so computation is cached
    # from one revision to the next
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successors = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successors:
            # one line per successors set: a leading space before each node
            for node in succsset:
                ui.write(b' ')
                ui.write(short(node))
            ui.write(b'\n')
3763 3764
3764 3765
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        # Only read what is already cached; never trigger a computation.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3774 3775
3775 3776
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions into template properties.
    props = {}
    for spec in opts['define']:
        try:
            key, value = (part.strip() for part in spec.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)

    if ui.verbose:
        # Show the parsed tree and, if aliases changed it, the expansion.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    if revs is None:
        # Generic template: render once with default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
3839 3840
3840 3841
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can return None; substitute a placeholder so output is stable.
    if response is None:
        response = b"<default response>"
    else:
        response = encoding.strtolocal(response)
    ui.writenoi18n(b'response: %s\n' % response)
3857 3858
3858 3859
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returned, verbatim.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3871 3872
3872 3873
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take the working-copy lock, then the store lock, before refreshing.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3878 3879
3879 3880
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all filelog optimisation
    """
    # Thin wrapper: all analysis, optimization selection, and the actual
    # upgrade work (when --run is given) happens in upgrade.upgraderepo().
    # The remaining revlog-selection flags travel through **opts unchanged.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )
3929 3930
3930 3931
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return

    # Optionally normalize path separators for display.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn

    # Size columns to the longest absolute and relative paths.
    abswidth = max(len(p) for p in matched)
    relwidth = max(len(repo.pathto(p)) for p in matched)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for p in matched:
        flag = b'exact' if m.exact(p) else b''
        line = fmt % (p, display(repo.pathto(p)), flag)
        ui.write(b"%s\n" % line.rstrip())
3957 3958
3958 3959
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # The trailing space separates the node list from the reason.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3976 3977
3977 3978
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # The remote-connection options were consumed by hg.peer(); drop them so
    # only the debugwireargs arguments remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Forward only the options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
4005 4006
4006 4007
4007 4008 def _parsewirelangblocks(fh):
4008 4009 activeaction = None
4009 4010 blocklines = []
4010 4011 lastindent = 0
4011 4012
4012 4013 for line in fh:
4013 4014 line = line.rstrip()
4014 4015 if not line:
4015 4016 continue
4016 4017
4017 4018 if line.startswith(b'#'):
4018 4019 continue
4019 4020
4020 4021 if not line.startswith(b' '):
4021 4022 # New block. Flush previous one.
4022 4023 if activeaction:
4023 4024 yield activeaction, blocklines
4024 4025
4025 4026 activeaction = line
4026 4027 blocklines = []
4027 4028 lastindent = 0
4028 4029 continue
4029 4030
4030 4031 # Else we start with an indent.
4031 4032
4032 4033 if not activeaction:
4033 4034 raise error.Abort(_(b'indented line outside of block'))
4034 4035
4035 4036 indent = len(line) - len(line.lstrip())
4036 4037
4037 4038 # If this line is indented more than the last line, concatenate it.
4038 4039 if indent > lastindent and blocklines:
4039 4040 blocklines[-1] += line.lstrip()
4040 4041 else:
4041 4042 blocklines.append(line)
4042 4043 lastindent = indent
4043 4044
4044 4045 # Flush last block.
4045 4046 if activeaction:
4046 4047 yield activeaction, blocklines
4047 4048
4048 4049
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

       command listkeys
           namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        # Fixed: the closing quote was missing from this
                        # user-facing error message.
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fixed: the filename is the second field of the split;
                    # the previous code passed the whole list to open(),
                    # which raised TypeError and broke BODYFILE entirely.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,489 +1,500 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 Algorithm works in the following way. You have two repository: local and
10 10 remote. They both contains a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of node *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issue with the original protocol was latency, it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case, you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which given a set of changelists tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follow:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple optimizations, first is instead of starting with a random
34 34 sample of missing, start by sending all heads, in the case where the local
35 35 repo is a subset, you computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with it (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from __future__ import absolute_import
44 44
45 45 import collections
46 46 import random
47 47
48 48 from .i18n import _
49 49 from .node import (
50 50 nullid,
51 51 nullrev,
52 52 )
53 53 from . import (
54 54 error,
55 55 policy,
56 56 util,
57 57 )
58 58
59 59
60 60 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
61 61 """update an existing sample to match the expected size
62 62
63 63 The sample is updated with revs exponentially distant from each head of the
64 64 <revs> set. (H~1, H~2, H~4, H~8, etc).
65 65
66 66 If a target size is specified, the sampling will stop once this size is
67 67 reached. Otherwise sampling will happen until roots of the <revs> set are
68 68 reached.
69 69
70 70 :revs: set of revs we want to discover (if None, assume the whole dag)
71 71 :heads: set of DAG head revs
72 72 :sample: a sample to update
73 73 :parentfn: a callable to resolve parents for a revision
74 74 :quicksamplesize: optional target size of the sample"""
75 75 dist = {}
76 76 visit = collections.deque(heads)
77 77 seen = set()
78 78 factor = 1
79 79 while visit:
80 80 curr = visit.popleft()
81 81 if curr in seen:
82 82 continue
83 83 d = dist.setdefault(curr, 1)
84 84 if d > factor:
85 85 factor *= 2
86 86 if d == factor:
87 87 sample.add(curr)
88 88 if quicksamplesize and (len(sample) >= quicksamplesize):
89 89 return
90 90 seen.add(curr)
91 91
92 92 for p in parentfn(curr):
93 93 if p != nullrev and (not revs or p in revs):
94 94 dist.setdefault(p, d + 1)
95 95 visit.append(p)
96 96
97 97
98 98 def _limitsample(sample, desiredlen, randomize=True):
99 99 """return a random subset of sample of at most desiredlen item.
100 100
101 101 If randomize is False, though, a deterministic subset is returned.
102 102 This is meant for integration tests.
103 103 """
104 104 if len(sample) <= desiredlen:
105 105 return sample
106 106 if randomize:
107 107 return set(random.sample(sample, desiredlen))
108 108 sample = list(sample)
109 109 sample.sort()
110 110 return set(sample[:desiredlen])
111 111
112 112
113 113 class partialdiscovery(object):
114 114 """an object representing ongoing discovery
115 115
116 116 Feed with data from the remote repository, this object keep track of the
117 117 current set of changeset in various states:
118 118
119 119 - common: revs also known remotely
120 120 - undecided: revs we don't have information on yet
121 121 - missing: revs missing remotely
122 122 (all tracked revisions are known locally)
123 123 """
124 124
125 125 def __init__(self, repo, targetheads, respectsize, randomize=True):
126 126 self._repo = repo
127 127 self._targetheads = targetheads
128 128 self._common = repo.changelog.incrementalmissingrevs()
129 129 self._undecided = None
130 130 self.missing = set()
131 131 self._childrenmap = None
132 132 self._respectsize = respectsize
133 133 self.randomize = randomize
134 134
135 135 def addcommons(self, commons):
136 136 """register nodes known as common"""
137 137 self._common.addbases(commons)
138 138 if self._undecided is not None:
139 139 self._common.removeancestorsfrom(self._undecided)
140 140
141 141 def addmissings(self, missings):
142 142 """register some nodes as missing"""
143 143 newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
144 144 if newmissing:
145 145 self.missing.update(newmissing)
146 146 self.undecided.difference_update(newmissing)
147 147
148 148 def addinfo(self, sample):
149 149 """consume an iterable of (rev, known) tuples"""
150 150 common = set()
151 151 missing = set()
152 152 for rev, known in sample:
153 153 if known:
154 154 common.add(rev)
155 155 else:
156 156 missing.add(rev)
157 157 if common:
158 158 self.addcommons(common)
159 159 if missing:
160 160 self.addmissings(missing)
161 161
162 162 def hasinfo(self):
163 163 """return True is we have any clue about the remote state"""
164 164 return self._common.hasbases()
165 165
166 166 def iscomplete(self):
167 167 """True if all the necessary data have been gathered"""
168 168 return self._undecided is not None and not self._undecided
169 169
170 170 @property
171 171 def undecided(self):
172 172 if self._undecided is not None:
173 173 return self._undecided
174 174 self._undecided = set(self._common.missingancestors(self._targetheads))
175 175 return self._undecided
176 176
177 177 def stats(self):
178 178 return {
179 179 'undecided': len(self.undecided),
180 180 }
181 181
182 182 def commonheads(self):
183 183 """the heads of the known common set"""
184 184 # heads(common) == heads(common.bases) since common represents
185 185 # common.bases and all its ancestors
186 186 return self._common.basesheads()
187 187
188 188 def _parentsgetter(self):
189 189 getrev = self._repo.changelog.index.__getitem__
190 190
191 191 def getparents(r):
192 192 return getrev(r)[5:7]
193 193
194 194 return getparents
195 195
196 196 def _childrengetter(self):
197 197
198 198 if self._childrenmap is not None:
199 199 # During discovery, the `undecided` set keep shrinking.
200 200 # Therefore, the map computed for an iteration N will be
201 201 # valid for iteration N+1. Instead of computing the same
202 202 # data over and over we cached it the first time.
203 203 return self._childrenmap.__getitem__
204 204
205 205 # _updatesample() essentially does interaction over revisions to look
206 206 # up their children. This lookup is expensive and doing it in a loop is
207 207 # quadratic. We precompute the children for all relevant revisions and
208 208 # make the lookup in _updatesample() a simple dict lookup.
209 209 self._childrenmap = children = {}
210 210
211 211 parentrevs = self._parentsgetter()
212 212 revs = self.undecided
213 213
214 214 for rev in sorted(revs):
215 215 # Always ensure revision has an entry so we don't need to worry
216 216 # about missing keys.
217 217 children[rev] = []
218 218 for prev in parentrevs(rev):
219 219 if prev == nullrev:
220 220 continue
221 221 c = children.get(prev)
222 222 if c is not None:
223 223 c.append(rev)
224 224 return children.__getitem__
225 225
226 226 def takequicksample(self, headrevs, size):
227 227 """takes a quick sample of size <size>
228 228
229 229 It is meant for initial sampling and focuses on querying heads and close
230 230 ancestors of heads.
231 231
232 232 :headrevs: set of head revisions in local DAG to consider
233 233 :size: the maximum size of the sample"""
234 234 revs = self.undecided
235 235 if len(revs) <= size:
236 236 return list(revs)
237 237 sample = set(self._repo.revs(b'heads(%ld)', revs))
238 238
239 239 if len(sample) >= size:
240 240 return _limitsample(sample, size, randomize=self.randomize)
241 241
242 242 _updatesample(
243 243 None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
244 244 )
245 245 return sample
246 246
247 247 def takefullsample(self, headrevs, size):
248 248 revs = self.undecided
249 249 if len(revs) <= size:
250 250 return list(revs)
251 251 repo = self._repo
252 252 sample = set(repo.revs(b'heads(%ld)', revs))
253 253 parentrevs = self._parentsgetter()
254 254
255 255 # update from heads
256 256 revsheads = sample.copy()
257 257 _updatesample(revs, revsheads, sample, parentrevs)
258 258
259 259 # update from roots
260 260 revsroots = set(repo.revs(b'roots(%ld)', revs))
261 261 childrenrevs = self._childrengetter()
262 262 _updatesample(revs, revsroots, sample, childrenrevs)
263 263 assert sample
264 264
265 265 if not self._respectsize:
266 266 size = max(size, min(len(revsroots), len(revsheads)))
267 267
268 268 sample = _limitsample(sample, size, randomize=self.randomize)
269 269 if len(sample) < size:
270 270 more = size - len(sample)
271 271 takefrom = list(revs - sample)
272 272 if self.randomize:
273 273 sample.update(random.sample(takefrom, more))
274 274 else:
275 275 takefrom.sort()
276 276 sample.update(takefrom[:more])
277 277 return sample
278 278
279 279
280 280 partialdiscovery = policy.importrust(
281 281 'discovery', member='PartialDiscovery', default=partialdiscovery
282 282 )
283 283
284 284
285 285 def findcommonheads(
286 286 ui,
287 287 local,
288 288 remote,
289 289 initialsamplesize=100,
290 290 fullsamplesize=200,
291 291 abortwhenunrelated=True,
292 292 ancestorsof=None,
293 293 samplegrowth=1.05,
294 audit=None,
294 295 ):
295 296 """Return a tuple (common, anyincoming, remoteheads) used to identify
296 297 missing nodes from or in remote.
298
299 The audit argument is an optional dictionnary that a caller can pass. it
300 will be updated with extra data about the discovery, this is useful for
301 debug.
297 302 """
298 303 start = util.timer()
299 304
300 305 roundtrips = 0
301 306 cl = local.changelog
302 307 clnode = cl.node
303 308 clrev = cl.rev
304 309
305 310 if ancestorsof is not None:
306 311 ownheads = [clrev(n) for n in ancestorsof]
307 312 else:
308 313 ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
309 314
310 315 # early exit if we know all the specified remote heads already
311 316 ui.debug(b"query 1; heads\n")
312 317 roundtrips += 1
313 318 # We also ask remote about all the local heads. That set can be arbitrarily
314 319 # large, so we used to limit it size to `initialsamplesize`. We no longer
315 320 # do as it proved counter productive. The skipped heads could lead to a
316 321 # large "undecided" set, slower to be clarified than if we asked the
317 322 # question for all heads right away.
318 323 #
319 324 # We are already fetching all server heads using the `heads` commands,
320 325 # sending a equivalent number of heads the other way should not have a
321 326 # significant impact. In addition, it is very likely that we are going to
322 327 # have to issue "known" request for an equivalent amount of revisions in
323 328 # order to decide if theses heads are common or missing.
324 329 #
325 330 # find a detailled analysis below.
326 331 #
327 332 # Case A: local and server both has few heads
328 333 #
329 334 # Ownheads is below initialsamplesize, limit would not have any effect.
330 335 #
331 336 # Case B: local has few heads and server has many
332 337 #
333 338 # Ownheads is below initialsamplesize, limit would not have any effect.
334 339 #
335 340 # Case C: local and server both has many heads
336 341 #
337 342 # We now transfert some more data, but not significantly more than is
338 343 # already transfered to carry the server heads.
339 344 #
340 345 # Case D: local has many heads, server has few
341 346 #
342 347 # D.1 local heads are mostly known remotely
343 348 #
344 349 # All the known head will have be part of a `known` request at some
345 350 # point for the discovery to finish. Sending them all earlier is
346 351 # actually helping.
347 352 #
348 353 # (This case is fairly unlikely, it requires the numerous heads to all
349 354 # be merged server side in only a few heads)
350 355 #
351 356 # D.2 local heads are mostly missing remotely
352 357 #
353 358 # To determine that the heads are missing, we'll have to issue `known`
354 359 # request for them or one of their ancestors. This amount of `known`
355 360 # request will likely be in the same order of magnitude than the amount
356 361 # of local heads.
357 362 #
358 363 # The only case where we can be more efficient using `known` request on
359 364 # ancestors are case were all the "missing" local heads are based on a
360 365 # few changeset, also "missing". This means we would have a "complex"
361 366 # graph (with many heads) attached to, but very independant to a the
362 367 # "simple" graph on the server. This is a fairly usual case and have
363 368 # not been met in the wild so far.
364 369 if remote.limitedarguments:
365 370 sample = _limitsample(ownheads, initialsamplesize)
366 371 # indices between sample and externalized version must match
367 372 sample = list(sample)
368 373 else:
369 374 sample = ownheads
370 375
371 376 with remote.commandexecutor() as e:
372 377 fheads = e.callcommand(b'heads', {})
373 378 fknown = e.callcommand(
374 379 b'known',
375 380 {
376 381 b'nodes': [clnode(r) for r in sample],
377 382 },
378 383 )
379 384
380 385 srvheadhashes, yesno = fheads.result(), fknown.result()
381 386
387 if audit is not None:
388 audit[b'total-roundtrips'] = 1
389
382 390 if cl.tip() == nullid:
383 391 if srvheadhashes != [nullid]:
384 392 return [nullid], True, srvheadhashes
385 393 return [nullid], False, []
386 394
387 395 # start actual discovery (we note this before the next "if" for
388 396 # compatibility reasons)
389 397 ui.status(_(b"searching for changes\n"))
390 398
391 399 knownsrvheads = [] # revnos of remote heads that are known locally
392 400 for node in srvheadhashes:
393 401 if node == nullid:
394 402 continue
395 403
396 404 try:
397 405 knownsrvheads.append(clrev(node))
398 406 # Catches unknown and filtered nodes.
399 407 except error.LookupError:
400 408 continue
401 409
402 410 if len(knownsrvheads) == len(srvheadhashes):
403 411 ui.debug(b"all remote heads known locally\n")
404 412 return srvheadhashes, False, srvheadhashes
405 413
406 414 if len(sample) == len(ownheads) and all(yesno):
407 415 ui.note(_(b"all local changesets known remotely\n"))
408 416 ownheadhashes = [clnode(r) for r in ownheads]
409 417 return ownheadhashes, True, srvheadhashes
410 418
411 419 # full blown discovery
412 420
413 421 randomize = ui.configbool(b'devel', b'discovery.randomize')
414 422 disco = partialdiscovery(
415 423 local, ownheads, remote.limitedarguments, randomize=randomize
416 424 )
417 425 # treat remote heads (and maybe own heads) as a first implicit sample
418 426 # response
419 427 disco.addcommons(knownsrvheads)
420 428 disco.addinfo(zip(sample, yesno))
421 429
422 430 full = False
423 431 progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
424 432 while not disco.iscomplete():
425 433
426 434 if full or disco.hasinfo():
427 435 if full:
428 436 ui.note(_(b"sampling from both directions\n"))
429 437 else:
430 438 ui.debug(b"taking initial sample\n")
431 439 samplefunc = disco.takefullsample
432 440 targetsize = fullsamplesize
433 441 if not remote.limitedarguments:
434 442 fullsamplesize = int(fullsamplesize * samplegrowth)
435 443 else:
436 444 # use even cheaper initial sample
437 445 ui.debug(b"taking quick initial sample\n")
438 446 samplefunc = disco.takequicksample
439 447 targetsize = initialsamplesize
440 448 sample = samplefunc(ownheads, targetsize)
441 449
442 450 roundtrips += 1
443 451 progress.update(roundtrips)
444 452 stats = disco.stats()
445 453 ui.debug(
446 454 b"query %i; still undecided: %i, sample size is: %i\n"
447 455 % (roundtrips, stats['undecided'], len(sample))
448 456 )
449 457
450 458 # indices between sample and externalized version must match
451 459 sample = list(sample)
452 460
453 461 with remote.commandexecutor() as e:
454 462 yesno = e.callcommand(
455 463 b'known',
456 464 {
457 465 b'nodes': [clnode(r) for r in sample],
458 466 },
459 467 ).result()
460 468
461 469 full = True
462 470
463 471 disco.addinfo(zip(sample, yesno))
464 472
465 473 result = disco.commonheads()
466 474 elapsed = util.timer() - start
467 475 progress.complete()
468 476 ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
469 477 msg = (
470 478 b'found %d common and %d unknown server heads,'
471 479 b' %d roundtrips in %.4fs\n'
472 480 )
473 481 missing = set(result) - set(knownsrvheads)
474 482 ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
475 483
484 if audit is not None:
485 audit[b'total-roundtrips'] = roundtrips
486
476 487 if not result and srvheadhashes != [nullid]:
477 488 if abortwhenunrelated:
478 489 raise error.Abort(_(b"repository is unrelated"))
479 490 else:
480 491 ui.warn(_(b"warning: repository is unrelated\n"))
481 492 return (
482 493 {nullid},
483 494 True,
484 495 srvheadhashes,
485 496 )
486 497
487 498 anyincoming = srvheadhashes != [nullid]
488 499 result = {clnode(r) for r in result}
489 500 return result, anyincoming, srvheadhashes
@@ -1,185 +1,190 b''
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11
12 12 from .i18n import _
13 13 from .node import (
14 14 nullid,
15 15 short,
16 16 )
17 17 from . import (
18 18 error,
19 19 pycompat,
20 20 )
21 21
22 22
23 def findcommonincoming(repo, remote, heads=None, force=False):
23 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
24 24 """Return a tuple (common, fetch, heads) used to identify the common
25 25 subset of nodes between repo and remote.
26 26
27 27 "common" is a list of (at least) the heads of the common subset.
28 28 "fetch" is a list of roots of the nodes that would be incoming, to be
29 29 supplied to changegroupsubset.
30 30 "heads" is either the supplied heads, or else the remote's heads.
31 31 """
32 32
33 33 knownnode = repo.changelog.hasnode
34 34 search = []
35 35 fetch = set()
36 36 seen = set()
37 37 seenbranch = set()
38 38 base = set()
39 39
40 40 if not heads:
41 41 with remote.commandexecutor() as e:
42 42 heads = e.callcommand(b'heads', {}).result()
43 43
44 if audit is not None:
45 audit[b'total-roundtrips'] = 1
46
44 47 if repo.changelog.tip() == nullid:
45 48 base.add(nullid)
46 49 if heads != [nullid]:
47 50 return [nullid], [nullid], list(heads)
48 51 return [nullid], [], heads
49 52
50 53 # assume we're closer to the tip than the root
51 54 # and start by examining the heads
52 55 repo.ui.status(_(b"searching for changes\n"))
53 56
54 57 unknown = []
55 58 for h in heads:
56 59 if not knownnode(h):
57 60 unknown.append(h)
58 61 else:
59 62 base.add(h)
60 63
61 64 if not unknown:
62 65 return list(base), [], list(heads)
63 66
64 67 req = set(unknown)
65 68 reqcnt = 0
66 69 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
67 70
68 71 # search through remote branches
69 72 # a 'branch' here is a linear segment of history, with four parts:
70 73 # head, root, first parent, second parent
71 74 # (a branch always has two parents (or none) by definition)
72 75 with remote.commandexecutor() as e:
73 76 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
74 77
75 78 unknown = collections.deque(branches)
76 79 while unknown:
77 80 r = []
78 81 while unknown:
79 82 n = unknown.popleft()
80 83 if n[0] in seen:
81 84 continue
82 85
83 86 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
84 87 if n[0] == nullid: # found the end of the branch
85 88 pass
86 89 elif n in seenbranch:
87 90 repo.ui.debug(b"branch already found\n")
88 91 continue
89 92 elif n[1] and knownnode(n[1]): # do we know the base?
90 93 repo.ui.debug(
91 94 b"found incomplete branch %s:%s\n"
92 95 % (short(n[0]), short(n[1]))
93 96 )
94 97 search.append(n[0:2]) # schedule branch range for scanning
95 98 seenbranch.add(n)
96 99 else:
97 100 if n[1] not in seen and n[1] not in fetch:
98 101 if knownnode(n[2]) and knownnode(n[3]):
99 102 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
100 103 fetch.add(n[1]) # earliest unknown
101 104 for p in n[2:4]:
102 105 if knownnode(p):
103 106 base.add(p) # latest known
104 107
105 108 for p in n[2:4]:
106 109 if p not in req and not knownnode(p):
107 110 r.append(p)
108 111 req.add(p)
109 112 seen.add(n[0])
110 113
111 114 if r:
112 115 reqcnt += 1
113 116 progress.increment()
114 117 repo.ui.debug(
115 118 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
116 119 )
117 120 for p in pycompat.xrange(0, len(r), 10):
118 121 with remote.commandexecutor() as e:
119 122 branches = e.callcommand(
120 123 b'branches',
121 124 {
122 125 b'nodes': r[p : p + 10],
123 126 },
124 127 ).result()
125 128
126 129 for b in branches:
127 130 repo.ui.debug(
128 131 b"received %s:%s\n" % (short(b[0]), short(b[1]))
129 132 )
130 133 unknown.append(b)
131 134
132 135 # do binary search on the branches we found
133 136 while search:
134 137 newsearch = []
135 138 reqcnt += 1
136 139 progress.increment()
137 140
138 141 with remote.commandexecutor() as e:
139 142 between = e.callcommand(b'between', {b'pairs': search}).result()
140 143
141 144 for n, l in zip(search, between):
142 145 l.append(n[1])
143 146 p = n[0]
144 147 f = 1
145 148 for i in l:
146 149 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
147 150 if knownnode(i):
148 151 if f <= 2:
149 152 repo.ui.debug(
150 153 b"found new branch changeset %s\n" % short(p)
151 154 )
152 155 fetch.add(p)
153 156 base.add(i)
154 157 else:
155 158 repo.ui.debug(
156 159 b"narrowed branch search to %s:%s\n"
157 160 % (short(p), short(i))
158 161 )
159 162 newsearch.append((p, i))
160 163 break
161 164 p, f = i, f * 2
162 165 search = newsearch
163 166
164 167 # sanity check our fetch list
165 168 for f in fetch:
166 169 if knownnode(f):
167 170 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
168 171
169 172 base = list(base)
170 173 if base == [nullid]:
171 174 if force:
172 175 repo.ui.warn(_(b"warning: repository is unrelated\n"))
173 176 else:
174 177 raise error.Abort(_(b"repository is unrelated"))
175 178
176 179 repo.ui.debug(
177 180 b"found new changesets starting at "
178 181 + b" ".join([short(f) for f in fetch])
179 182 + b"\n"
180 183 )
181 184
182 185 progress.complete()
183 186 repo.ui.debug(b"%d total queries\n" % reqcnt)
187 if audit is not None:
188 audit[b'total-roundtrips'] = reqcnt
184 189
185 190 return base, list(fetch), heads
@@ -1,1502 +1,1541 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 46 elapsed time: * seconds (glob)
47 round-trips: 2
47 48 heads summary:
48 49 total common heads: 2
49 50 also local heads: 2
50 51 also remote heads: 1
51 52 both: 1
52 53 local heads: 2
53 54 common: 2
54 55 missing: 0
55 56 remote heads: 3
56 57 common: 1
57 58 unknown: 2
58 59 local changesets: 7
59 60 common: 7
60 61 heads: 2
61 62 roots: 1
62 63 missing: 0
63 64 heads: 0
64 65 roots: 0
65 66 first undecided set: 3
66 67 heads: 1
67 68 roots: 1
68 69 common: 3
69 70 missing: 0
70 71 common heads: 01241442b3c2 b5714e113bc0
71 72
72 73 % -- a -> b set
73 74 comparing with b
74 75 query 1; heads
75 76 searching for changes
76 77 all local changesets known remotely
77 78 elapsed time: * seconds (glob)
79 round-trips: 1
78 80 heads summary:
79 81 total common heads: 2
80 82 also local heads: 2
81 83 also remote heads: 1
82 84 both: 1
83 85 local heads: 2
84 86 common: 2
85 87 missing: 0
86 88 remote heads: 3
87 89 common: 1
88 90 unknown: 2
89 91 local changesets: 7
90 92 common: 7
91 93 heads: 2
92 94 roots: 1
93 95 missing: 0
94 96 heads: 0
95 97 roots: 0
96 98 first undecided set: 3
97 99 heads: 1
98 100 roots: 1
99 101 common: 3
100 102 missing: 0
101 103 common heads: 01241442b3c2 b5714e113bc0
102 104
103 105 % -- a -> b set (tip only)
104 106 comparing with b
105 107 query 1; heads
106 108 searching for changes
107 109 all local changesets known remotely
108 110 elapsed time: * seconds (glob)
111 round-trips: 1
109 112 heads summary:
110 113 total common heads: 1
111 114 also local heads: 1
112 115 also remote heads: 0
113 116 both: 0
114 117 local heads: 2
115 118 common: 1
116 119 missing: 1
117 120 remote heads: 3
118 121 common: 0
119 122 unknown: 3
120 123 local changesets: 7
121 124 common: 6
122 125 heads: 1
123 126 roots: 1
124 127 missing: 1
125 128 heads: 1
126 129 roots: 1
127 130 first undecided set: 6
128 131 heads: 2
129 132 roots: 1
130 133 common: 5
131 134 missing: 1
132 135 common heads: b5714e113bc0
133 136
134 137 % -- b -> a tree
135 138 comparing with a
136 139 searching for changes
137 140 unpruned common: 01241442b3c2 b5714e113bc0
138 141 elapsed time: * seconds (glob)
142 round-trips: 1
139 143 heads summary:
140 144 total common heads: 2
141 145 also local heads: 1
142 146 also remote heads: 2
143 147 both: 1
144 148 local heads: 3
145 149 common: 1
146 150 missing: 2
147 151 remote heads: 2
148 152 common: 2
149 153 unknown: 0
150 154 local changesets: 15
151 155 common: 7
152 156 heads: 2
153 157 roots: 1
154 158 missing: 8
155 159 heads: 2
156 160 roots: 2
157 161 first undecided set: 8
158 162 heads: 2
159 163 roots: 2
160 164 common: 0
161 165 missing: 8
162 166 common heads: 01241442b3c2 b5714e113bc0
163 167
164 168 % -- b -> a set
165 169 comparing with a
166 170 query 1; heads
167 171 searching for changes
168 172 all remote heads known locally
169 173 elapsed time: * seconds (glob)
174 round-trips: 1
170 175 heads summary:
171 176 total common heads: 2
172 177 also local heads: 1
173 178 also remote heads: 2
174 179 both: 1
175 180 local heads: 3
176 181 common: 1
177 182 missing: 2
178 183 remote heads: 2
179 184 common: 2
180 185 unknown: 0
181 186 local changesets: 15
182 187 common: 7
183 188 heads: 2
184 189 roots: 1
185 190 missing: 8
186 191 heads: 2
187 192 roots: 2
188 193 first undecided set: 8
189 194 heads: 2
190 195 roots: 2
191 196 common: 0
192 197 missing: 8
193 198 common heads: 01241442b3c2 b5714e113bc0
194 199
195 200 % -- b -> a set (tip only)
196 201 comparing with a
197 202 query 1; heads
198 203 searching for changes
199 204 all remote heads known locally
200 205 elapsed time: * seconds (glob)
206 round-trips: 1
201 207 heads summary:
202 208 total common heads: 2
203 209 also local heads: 1
204 210 also remote heads: 2
205 211 both: 1
206 212 local heads: 3
207 213 common: 1
208 214 missing: 2
209 215 remote heads: 2
210 216 common: 2
211 217 unknown: 0
212 218 local changesets: 15
213 219 common: 7
214 220 heads: 2
215 221 roots: 1
216 222 missing: 8
217 223 heads: 2
218 224 roots: 2
219 225 first undecided set: 8
220 226 heads: 2
221 227 roots: 2
222 228 common: 0
223 229 missing: 8
224 230 common heads: 01241442b3c2 b5714e113bc0
225 231
226 232
227 233 Many new:
228 234
229 235 $ testdesc '-ra1 -ra2' '-rb' '
230 236 > +2:f +3:a1 +3:b
231 237 > <f +30 :a2'
232 238
233 239 % -- a -> b tree
234 240 comparing with b
235 241 searching for changes
236 242 unpruned common: bebd167eb94d
237 243 elapsed time: * seconds (glob)
244 round-trips: 2
238 245 heads summary:
239 246 total common heads: 1
240 247 also local heads: 1
241 248 also remote heads: 0
242 249 both: 0
243 250 local heads: 2
244 251 common: 1
245 252 missing: 1
246 253 remote heads: 1
247 254 common: 0
248 255 unknown: 1
249 256 local changesets: 35
250 257 common: 5
251 258 heads: 1
252 259 roots: 1
253 260 missing: 30
254 261 heads: 1
255 262 roots: 1
256 263 first undecided set: 34
257 264 heads: 2
258 265 roots: 1
259 266 common: 4
260 267 missing: 30
261 268 common heads: bebd167eb94d
262 269
263 270 % -- a -> b set
264 271 comparing with b
265 272 query 1; heads
266 273 searching for changes
267 274 taking initial sample
268 275 searching: 2 queries
269 276 query 2; still undecided: 29, sample size is: 29
270 277 2 total queries in *.????s (glob)
271 278 elapsed time: * seconds (glob)
279 round-trips: 2
272 280 heads summary:
273 281 total common heads: 1
274 282 also local heads: 1
275 283 also remote heads: 0
276 284 both: 0
277 285 local heads: 2
278 286 common: 1
279 287 missing: 1
280 288 remote heads: 1
281 289 common: 0
282 290 unknown: 1
283 291 local changesets: 35
284 292 common: 5
285 293 heads: 1
286 294 roots: 1
287 295 missing: 30
288 296 heads: 1
289 297 roots: 1
290 298 first undecided set: 34
291 299 heads: 2
292 300 roots: 1
293 301 common: 4
294 302 missing: 30
295 303 common heads: bebd167eb94d
296 304
297 305 % -- a -> b set (tip only)
298 306 comparing with b
299 307 query 1; heads
300 308 searching for changes
301 309 taking quick initial sample
302 310 searching: 2 queries
303 311 query 2; still undecided: 31, sample size is: 31
304 312 2 total queries in *.????s (glob)
305 313 elapsed time: * seconds (glob)
314 round-trips: 2
306 315 heads summary:
307 316 total common heads: 1
308 317 also local heads: 0
309 318 also remote heads: 0
310 319 both: 0
311 320 local heads: 2
312 321 common: 0
313 322 missing: 2
314 323 remote heads: 1
315 324 common: 0
316 325 unknown: 1
317 326 local changesets: 35
318 327 common: 2
319 328 heads: 1
320 329 roots: 1
321 330 missing: 33
322 331 heads: 2
323 332 roots: 2
324 333 first undecided set: 35
325 334 heads: 2
326 335 roots: 1
327 336 common: 2
328 337 missing: 33
329 338 common heads: 66f7d451a68b
330 339
331 340 % -- b -> a tree
332 341 comparing with a
333 342 searching for changes
334 343 unpruned common: 66f7d451a68b bebd167eb94d
335 344 elapsed time: * seconds (glob)
345 round-trips: 4
336 346 heads summary:
337 347 total common heads: 1
338 348 also local heads: 0
339 349 also remote heads: 1
340 350 both: 0
341 351 local heads: 1
342 352 common: 0
343 353 missing: 1
344 354 remote heads: 2
345 355 common: 1
346 356 unknown: 1
347 357 local changesets: 8
348 358 common: 5
349 359 heads: 1
350 360 roots: 1
351 361 missing: 3
352 362 heads: 1
353 363 roots: 1
354 364 first undecided set: 3
355 365 heads: 1
356 366 roots: 1
357 367 common: 0
358 368 missing: 3
359 369 common heads: bebd167eb94d
360 370
361 371 % -- b -> a set
362 372 comparing with a
363 373 query 1; heads
364 374 searching for changes
365 375 taking initial sample
366 376 searching: 2 queries
367 377 query 2; still undecided: 2, sample size is: 2
368 378 2 total queries in *.????s (glob)
369 379 elapsed time: * seconds (glob)
380 round-trips: 2
370 381 heads summary:
371 382 total common heads: 1
372 383 also local heads: 0
373 384 also remote heads: 1
374 385 both: 0
375 386 local heads: 1
376 387 common: 0
377 388 missing: 1
378 389 remote heads: 2
379 390 common: 1
380 391 unknown: 1
381 392 local changesets: 8
382 393 common: 5
383 394 heads: 1
384 395 roots: 1
385 396 missing: 3
386 397 heads: 1
387 398 roots: 1
388 399 first undecided set: 3
389 400 heads: 1
390 401 roots: 1
391 402 common: 0
392 403 missing: 3
393 404 common heads: bebd167eb94d
394 405
395 406 % -- b -> a set (tip only)
396 407 comparing with a
397 408 query 1; heads
398 409 searching for changes
399 410 taking initial sample
400 411 searching: 2 queries
401 412 query 2; still undecided: 2, sample size is: 2
402 413 2 total queries in *.????s (glob)
403 414 elapsed time: * seconds (glob)
415 round-trips: 2
404 416 heads summary:
405 417 total common heads: 1
406 418 also local heads: 0
407 419 also remote heads: 1
408 420 both: 0
409 421 local heads: 1
410 422 common: 0
411 423 missing: 1
412 424 remote heads: 2
413 425 common: 1
414 426 unknown: 1
415 427 local changesets: 8
416 428 common: 5
417 429 heads: 1
418 430 roots: 1
419 431 missing: 3
420 432 heads: 1
421 433 roots: 1
422 434 first undecided set: 3
423 435 heads: 1
424 436 roots: 1
425 437 common: 0
426 438 missing: 3
427 439 common heads: bebd167eb94d
428 440
429 441 Both sides many new with stub:
430 442
431 443 $ testdesc '-ra1 -ra2' '-rb' '
432 444 > +2:f +2:a1 +30 :b
433 445 > <f +30 :a2'
434 446
435 447 % -- a -> b tree
436 448 comparing with b
437 449 searching for changes
438 450 unpruned common: 2dc09a01254d
439 451 elapsed time: * seconds (glob)
452 round-trips: 4
440 453 heads summary:
441 454 total common heads: 1
442 455 also local heads: 1
443 456 also remote heads: 0
444 457 both: 0
445 458 local heads: 2
446 459 common: 1
447 460 missing: 1
448 461 remote heads: 1
449 462 common: 0
450 463 unknown: 1
451 464 local changesets: 34
452 465 common: 4
453 466 heads: 1
454 467 roots: 1
455 468 missing: 30
456 469 heads: 1
457 470 roots: 1
458 471 first undecided set: 33
459 472 heads: 2
460 473 roots: 1
461 474 common: 3
462 475 missing: 30
463 476 common heads: 2dc09a01254d
464 477
465 478 % -- a -> b set
466 479 comparing with b
467 480 query 1; heads
468 481 searching for changes
469 482 taking initial sample
470 483 searching: 2 queries
471 484 query 2; still undecided: 29, sample size is: 29
472 485 2 total queries in *.????s (glob)
473 486 elapsed time: * seconds (glob)
487 round-trips: 2
474 488 heads summary:
475 489 total common heads: 1
476 490 also local heads: 1
477 491 also remote heads: 0
478 492 both: 0
479 493 local heads: 2
480 494 common: 1
481 495 missing: 1
482 496 remote heads: 1
483 497 common: 0
484 498 unknown: 1
485 499 local changesets: 34
486 500 common: 4
487 501 heads: 1
488 502 roots: 1
489 503 missing: 30
490 504 heads: 1
491 505 roots: 1
492 506 first undecided set: 33
493 507 heads: 2
494 508 roots: 1
495 509 common: 3
496 510 missing: 30
497 511 common heads: 2dc09a01254d
498 512
499 513 % -- a -> b set (tip only)
500 514 comparing with b
501 515 query 1; heads
502 516 searching for changes
503 517 taking quick initial sample
504 518 searching: 2 queries
505 519 query 2; still undecided: 31, sample size is: 31
506 520 2 total queries in *.????s (glob)
507 521 elapsed time: * seconds (glob)
522 round-trips: 2
508 523 heads summary:
509 524 total common heads: 1
510 525 also local heads: 0
511 526 also remote heads: 0
512 527 both: 0
513 528 local heads: 2
514 529 common: 0
515 530 missing: 2
516 531 remote heads: 1
517 532 common: 0
518 533 unknown: 1
519 534 local changesets: 34
520 535 common: 2
521 536 heads: 1
522 537 roots: 1
523 538 missing: 32
524 539 heads: 2
525 540 roots: 2
526 541 first undecided set: 34
527 542 heads: 2
528 543 roots: 1
529 544 common: 2
530 545 missing: 32
531 546 common heads: 66f7d451a68b
532 547
533 548 % -- b -> a tree
534 549 comparing with a
535 550 searching for changes
536 551 unpruned common: 2dc09a01254d 66f7d451a68b
537 552 elapsed time: * seconds (glob)
553 round-trips: 4
538 554 heads summary:
539 555 total common heads: 1
540 556 also local heads: 0
541 557 also remote heads: 1
542 558 both: 0
543 559 local heads: 1
544 560 common: 0
545 561 missing: 1
546 562 remote heads: 2
547 563 common: 1
548 564 unknown: 1
549 565 local changesets: 34
550 566 common: 4
551 567 heads: 1
552 568 roots: 1
553 569 missing: 30
554 570 heads: 1
555 571 roots: 1
556 572 first undecided set: 30
557 573 heads: 1
558 574 roots: 1
559 575 common: 0
560 576 missing: 30
561 577 common heads: 2dc09a01254d
562 578
563 579 % -- b -> a set
564 580 comparing with a
565 581 query 1; heads
566 582 searching for changes
567 583 taking initial sample
568 584 searching: 2 queries
569 585 query 2; still undecided: 29, sample size is: 29
570 586 2 total queries in *.????s (glob)
571 587 elapsed time: * seconds (glob)
588 round-trips: 2
572 589 heads summary:
573 590 total common heads: 1
574 591 also local heads: 0
575 592 also remote heads: 1
576 593 both: 0
577 594 local heads: 1
578 595 common: 0
579 596 missing: 1
580 597 remote heads: 2
581 598 common: 1
582 599 unknown: 1
583 600 local changesets: 34
584 601 common: 4
585 602 heads: 1
586 603 roots: 1
587 604 missing: 30
588 605 heads: 1
589 606 roots: 1
590 607 first undecided set: 30
591 608 heads: 1
592 609 roots: 1
593 610 common: 0
594 611 missing: 30
595 612 common heads: 2dc09a01254d
596 613
597 614 % -- b -> a set (tip only)
598 615 comparing with a
599 616 query 1; heads
600 617 searching for changes
601 618 taking initial sample
602 619 searching: 2 queries
603 620 query 2; still undecided: 29, sample size is: 29
604 621 2 total queries in *.????s (glob)
605 622 elapsed time: * seconds (glob)
623 round-trips: 2
606 624 heads summary:
607 625 total common heads: 1
608 626 also local heads: 0
609 627 also remote heads: 1
610 628 both: 0
611 629 local heads: 1
612 630 common: 0
613 631 missing: 1
614 632 remote heads: 2
615 633 common: 1
616 634 unknown: 1
617 635 local changesets: 34
618 636 common: 4
619 637 heads: 1
620 638 roots: 1
621 639 missing: 30
622 640 heads: 1
623 641 roots: 1
624 642 first undecided set: 30
625 643 heads: 1
626 644 roots: 1
627 645 common: 0
628 646 missing: 30
629 647 common heads: 2dc09a01254d
630 648
631 649
632 650 Both many new:
633 651
634 652 $ testdesc '-ra' '-rb' '
635 653 > +2:f +30 :b
636 654 > <f +30 :a'
637 655
638 656 % -- a -> b tree
639 657 comparing with b
640 658 searching for changes
641 659 unpruned common: 66f7d451a68b
642 660 elapsed time: * seconds (glob)
661 round-trips: 4
643 662 heads summary:
644 663 total common heads: 1
645 664 also local heads: 0
646 665 also remote heads: 0
647 666 both: 0
648 667 local heads: 1
649 668 common: 0
650 669 missing: 1
651 670 remote heads: 1
652 671 common: 0
653 672 unknown: 1
654 673 local changesets: 32
655 674 common: 2
656 675 heads: 1
657 676 roots: 1
658 677 missing: 30
659 678 heads: 1
660 679 roots: 1
661 680 first undecided set: 32
662 681 heads: 1
663 682 roots: 1
664 683 common: 2
665 684 missing: 30
666 685 common heads: 66f7d451a68b
667 686
668 687 % -- a -> b set
669 688 comparing with b
670 689 query 1; heads
671 690 searching for changes
672 691 taking quick initial sample
673 692 searching: 2 queries
674 693 query 2; still undecided: 31, sample size is: 31
675 694 2 total queries in *.????s (glob)
676 695 elapsed time: * seconds (glob)
696 round-trips: 2
677 697 heads summary:
678 698 total common heads: 1
679 699 also local heads: 0
680 700 also remote heads: 0
681 701 both: 0
682 702 local heads: 1
683 703 common: 0
684 704 missing: 1
685 705 remote heads: 1
686 706 common: 0
687 707 unknown: 1
688 708 local changesets: 32
689 709 common: 2
690 710 heads: 1
691 711 roots: 1
692 712 missing: 30
693 713 heads: 1
694 714 roots: 1
695 715 first undecided set: 32
696 716 heads: 1
697 717 roots: 1
698 718 common: 2
699 719 missing: 30
700 720 common heads: 66f7d451a68b
701 721
702 722 % -- a -> b set (tip only)
703 723 comparing with b
704 724 query 1; heads
705 725 searching for changes
706 726 taking quick initial sample
707 727 searching: 2 queries
708 728 query 2; still undecided: 31, sample size is: 31
709 729 2 total queries in *.????s (glob)
710 730 elapsed time: * seconds (glob)
731 round-trips: 2
711 732 heads summary:
712 733 total common heads: 1
713 734 also local heads: 0
714 735 also remote heads: 0
715 736 both: 0
716 737 local heads: 1
717 738 common: 0
718 739 missing: 1
719 740 remote heads: 1
720 741 common: 0
721 742 unknown: 1
722 743 local changesets: 32
723 744 common: 2
724 745 heads: 1
725 746 roots: 1
726 747 missing: 30
727 748 heads: 1
728 749 roots: 1
729 750 first undecided set: 32
730 751 heads: 1
731 752 roots: 1
732 753 common: 2
733 754 missing: 30
734 755 common heads: 66f7d451a68b
735 756
736 757 % -- b -> a tree
737 758 comparing with a
738 759 searching for changes
739 760 unpruned common: 66f7d451a68b
740 761 elapsed time: * seconds (glob)
762 round-trips: 4
741 763 heads summary:
742 764 total common heads: 1
743 765 also local heads: 0
744 766 also remote heads: 0
745 767 both: 0
746 768 local heads: 1
747 769 common: 0
748 770 missing: 1
749 771 remote heads: 1
750 772 common: 0
751 773 unknown: 1
752 774 local changesets: 32
753 775 common: 2
754 776 heads: 1
755 777 roots: 1
756 778 missing: 30
757 779 heads: 1
758 780 roots: 1
759 781 first undecided set: 32
760 782 heads: 1
761 783 roots: 1
762 784 common: 2
763 785 missing: 30
764 786 common heads: 66f7d451a68b
765 787
766 788 % -- b -> a set
767 789 comparing with a
768 790 query 1; heads
769 791 searching for changes
770 792 taking quick initial sample
771 793 searching: 2 queries
772 794 query 2; still undecided: 31, sample size is: 31
773 795 2 total queries in *.????s (glob)
774 796 elapsed time: * seconds (glob)
797 round-trips: 2
775 798 heads summary:
776 799 total common heads: 1
777 800 also local heads: 0
778 801 also remote heads: 0
779 802 both: 0
780 803 local heads: 1
781 804 common: 0
782 805 missing: 1
783 806 remote heads: 1
784 807 common: 0
785 808 unknown: 1
786 809 local changesets: 32
787 810 common: 2
788 811 heads: 1
789 812 roots: 1
790 813 missing: 30
791 814 heads: 1
792 815 roots: 1
793 816 first undecided set: 32
794 817 heads: 1
795 818 roots: 1
796 819 common: 2
797 820 missing: 30
798 821 common heads: 66f7d451a68b
799 822
800 823 % -- b -> a set (tip only)
801 824 comparing with a
802 825 query 1; heads
803 826 searching for changes
804 827 taking quick initial sample
805 828 searching: 2 queries
806 829 query 2; still undecided: 31, sample size is: 31
807 830 2 total queries in *.????s (glob)
808 831 elapsed time: * seconds (glob)
832 round-trips: 2
809 833 heads summary:
810 834 total common heads: 1
811 835 also local heads: 0
812 836 also remote heads: 0
813 837 both: 0
814 838 local heads: 1
815 839 common: 0
816 840 missing: 1
817 841 remote heads: 1
818 842 common: 0
819 843 unknown: 1
820 844 local changesets: 32
821 845 common: 2
822 846 heads: 1
823 847 roots: 1
824 848 missing: 30
825 849 heads: 1
826 850 roots: 1
827 851 first undecided set: 32
828 852 heads: 1
829 853 roots: 1
830 854 common: 2
831 855 missing: 30
832 856 common heads: 66f7d451a68b
833 857
834 858
835 859 Both many new skewed:
836 860
837 861 $ testdesc '-ra' '-rb' '
838 862 > +2:f +30 :b
839 863 > <f +50 :a'
840 864
841 865 % -- a -> b tree
842 866 comparing with b
843 867 searching for changes
844 868 unpruned common: 66f7d451a68b
845 869 elapsed time: * seconds (glob)
870 round-trips: 4
846 871 heads summary:
847 872 total common heads: 1
848 873 also local heads: 0
849 874 also remote heads: 0
850 875 both: 0
851 876 local heads: 1
852 877 common: 0
853 878 missing: 1
854 879 remote heads: 1
855 880 common: 0
856 881 unknown: 1
857 882 local changesets: 52
858 883 common: 2
859 884 heads: 1
860 885 roots: 1
861 886 missing: 50
862 887 heads: 1
863 888 roots: 1
864 889 first undecided set: 52
865 890 heads: 1
866 891 roots: 1
867 892 common: 2
868 893 missing: 50
869 894 common heads: 66f7d451a68b
870 895
871 896 % -- a -> b set
872 897 comparing with b
873 898 query 1; heads
874 899 searching for changes
875 900 taking quick initial sample
876 901 searching: 2 queries
877 902 query 2; still undecided: 51, sample size is: 51
878 903 2 total queries in *.????s (glob)
879 904 elapsed time: * seconds (glob)
905 round-trips: 2
880 906 heads summary:
881 907 total common heads: 1
882 908 also local heads: 0
883 909 also remote heads: 0
884 910 both: 0
885 911 local heads: 1
886 912 common: 0
887 913 missing: 1
888 914 remote heads: 1
889 915 common: 0
890 916 unknown: 1
891 917 local changesets: 52
892 918 common: 2
893 919 heads: 1
894 920 roots: 1
895 921 missing: 50
896 922 heads: 1
897 923 roots: 1
898 924 first undecided set: 52
899 925 heads: 1
900 926 roots: 1
901 927 common: 2
902 928 missing: 50
903 929 common heads: 66f7d451a68b
904 930
905 931 % -- a -> b set (tip only)
906 932 comparing with b
907 933 query 1; heads
908 934 searching for changes
909 935 taking quick initial sample
910 936 searching: 2 queries
911 937 query 2; still undecided: 51, sample size is: 51
912 938 2 total queries in *.????s (glob)
913 939 elapsed time: * seconds (glob)
940 round-trips: 2
914 941 heads summary:
915 942 total common heads: 1
916 943 also local heads: 0
917 944 also remote heads: 0
918 945 both: 0
919 946 local heads: 1
920 947 common: 0
921 948 missing: 1
922 949 remote heads: 1
923 950 common: 0
924 951 unknown: 1
925 952 local changesets: 52
926 953 common: 2
927 954 heads: 1
928 955 roots: 1
929 956 missing: 50
930 957 heads: 1
931 958 roots: 1
932 959 first undecided set: 52
933 960 heads: 1
934 961 roots: 1
935 962 common: 2
936 963 missing: 50
937 964 common heads: 66f7d451a68b
938 965
939 966 % -- b -> a tree
940 967 comparing with a
941 968 searching for changes
942 969 unpruned common: 66f7d451a68b
943 970 elapsed time: * seconds (glob)
971 round-trips: 3
944 972 heads summary:
945 973 total common heads: 1
946 974 also local heads: 0
947 975 also remote heads: 0
948 976 both: 0
949 977 local heads: 1
950 978 common: 0
951 979 missing: 1
952 980 remote heads: 1
953 981 common: 0
954 982 unknown: 1
955 983 local changesets: 32
956 984 common: 2
957 985 heads: 1
958 986 roots: 1
959 987 missing: 30
960 988 heads: 1
961 989 roots: 1
962 990 first undecided set: 32
963 991 heads: 1
964 992 roots: 1
965 993 common: 2
966 994 missing: 30
967 995 common heads: 66f7d451a68b
968 996
969 997 % -- b -> a set
970 998 comparing with a
971 999 query 1; heads
972 1000 searching for changes
973 1001 taking quick initial sample
974 1002 searching: 2 queries
975 1003 query 2; still undecided: 31, sample size is: 31
976 1004 2 total queries in *.????s (glob)
977 1005 elapsed time: * seconds (glob)
1006 round-trips: 2
978 1007 heads summary:
979 1008 total common heads: 1
980 1009 also local heads: 0
981 1010 also remote heads: 0
982 1011 both: 0
983 1012 local heads: 1
984 1013 common: 0
985 1014 missing: 1
986 1015 remote heads: 1
987 1016 common: 0
988 1017 unknown: 1
989 1018 local changesets: 32
990 1019 common: 2
991 1020 heads: 1
992 1021 roots: 1
993 1022 missing: 30
994 1023 heads: 1
995 1024 roots: 1
996 1025 first undecided set: 32
997 1026 heads: 1
998 1027 roots: 1
999 1028 common: 2
1000 1029 missing: 30
1001 1030 common heads: 66f7d451a68b
1002 1031
1003 1032 % -- b -> a set (tip only)
1004 1033 comparing with a
1005 1034 query 1; heads
1006 1035 searching for changes
1007 1036 taking quick initial sample
1008 1037 searching: 2 queries
1009 1038 query 2; still undecided: 31, sample size is: 31
1010 1039 2 total queries in *.????s (glob)
1011 1040 elapsed time: * seconds (glob)
1041 round-trips: 2
1012 1042 heads summary:
1013 1043 total common heads: 1
1014 1044 also local heads: 0
1015 1045 also remote heads: 0
1016 1046 both: 0
1017 1047 local heads: 1
1018 1048 common: 0
1019 1049 missing: 1
1020 1050 remote heads: 1
1021 1051 common: 0
1022 1052 unknown: 1
1023 1053 local changesets: 32
1024 1054 common: 2
1025 1055 heads: 1
1026 1056 roots: 1
1027 1057 missing: 30
1028 1058 heads: 1
1029 1059 roots: 1
1030 1060 first undecided set: 32
1031 1061 heads: 1
1032 1062 roots: 1
1033 1063 common: 2
1034 1064 missing: 30
1035 1065 common heads: 66f7d451a68b
1036 1066
1037 1067
1038 1068 Both many new on top of long history:
1039 1069
1040 1070 $ testdesc '-ra' '-rb' '
1041 1071 > +1000:f +30 :b
1042 1072 > <f +50 :a'
1043 1073
1044 1074 % -- a -> b tree
1045 1075 comparing with b
1046 1076 searching for changes
1047 1077 unpruned common: 7ead0cba2838
1048 1078 elapsed time: * seconds (glob)
1079 round-trips: 4
1049 1080 heads summary:
1050 1081 total common heads: 1
1051 1082 also local heads: 0
1052 1083 also remote heads: 0
1053 1084 both: 0
1054 1085 local heads: 1
1055 1086 common: 0
1056 1087 missing: 1
1057 1088 remote heads: 1
1058 1089 common: 0
1059 1090 unknown: 1
1060 1091 local changesets: 1050
1061 1092 common: 1000
1062 1093 heads: 1
1063 1094 roots: 1
1064 1095 missing: 50
1065 1096 heads: 1
1066 1097 roots: 1
1067 1098 first undecided set: 1050
1068 1099 heads: 1
1069 1100 roots: 1
1070 1101 common: 1000
1071 1102 missing: 50
1072 1103 common heads: 7ead0cba2838
1073 1104
1074 1105 % -- a -> b set
1075 1106 comparing with b
1076 1107 query 1; heads
1077 1108 searching for changes
1078 1109 taking quick initial sample
1079 1110 searching: 2 queries
1080 1111 query 2; still undecided: 1049, sample size is: 11
1081 1112 sampling from both directions
1082 1113 searching: 3 queries
1083 1114 query 3; still undecided: 31, sample size is: 31
1084 1115 3 total queries in *.????s (glob)
1085 1116 elapsed time: * seconds (glob)
1117 round-trips: 3
1086 1118 heads summary:
1087 1119 total common heads: 1
1088 1120 also local heads: 0
1089 1121 also remote heads: 0
1090 1122 both: 0
1091 1123 local heads: 1
1092 1124 common: 0
1093 1125 missing: 1
1094 1126 remote heads: 1
1095 1127 common: 0
1096 1128 unknown: 1
1097 1129 local changesets: 1050
1098 1130 common: 1000
1099 1131 heads: 1
1100 1132 roots: 1
1101 1133 missing: 50
1102 1134 heads: 1
1103 1135 roots: 1
1104 1136 first undecided set: 1050
1105 1137 heads: 1
1106 1138 roots: 1
1107 1139 common: 1000
1108 1140 missing: 50
1109 1141 common heads: 7ead0cba2838
1110 1142
1111 1143 % -- a -> b set (tip only)
1112 1144 comparing with b
1113 1145 query 1; heads
1114 1146 searching for changes
1115 1147 taking quick initial sample
1116 1148 searching: 2 queries
1117 1149 query 2; still undecided: 1049, sample size is: 11
1118 1150 sampling from both directions
1119 1151 searching: 3 queries
1120 1152 query 3; still undecided: 31, sample size is: 31
1121 1153 3 total queries in *.????s (glob)
1122 1154 elapsed time: * seconds (glob)
1155 round-trips: 3
1123 1156 heads summary:
1124 1157 total common heads: 1
1125 1158 also local heads: 0
1126 1159 also remote heads: 0
1127 1160 both: 0
1128 1161 local heads: 1
1129 1162 common: 0
1130 1163 missing: 1
1131 1164 remote heads: 1
1132 1165 common: 0
1133 1166 unknown: 1
1134 1167 local changesets: 1050
1135 1168 common: 1000
1136 1169 heads: 1
1137 1170 roots: 1
1138 1171 missing: 50
1139 1172 heads: 1
1140 1173 roots: 1
1141 1174 first undecided set: 1050
1142 1175 heads: 1
1143 1176 roots: 1
1144 1177 common: 1000
1145 1178 missing: 50
1146 1179 common heads: 7ead0cba2838
1147 1180
1148 1181 % -- b -> a tree
1149 1182 comparing with a
1150 1183 searching for changes
1151 1184 unpruned common: 7ead0cba2838
1152 1185 elapsed time: * seconds (glob)
1186 round-trips: 3
1153 1187 heads summary:
1154 1188 total common heads: 1
1155 1189 also local heads: 0
1156 1190 also remote heads: 0
1157 1191 both: 0
1158 1192 local heads: 1
1159 1193 common: 0
1160 1194 missing: 1
1161 1195 remote heads: 1
1162 1196 common: 0
1163 1197 unknown: 1
1164 1198 local changesets: 1030
1165 1199 common: 1000
1166 1200 heads: 1
1167 1201 roots: 1
1168 1202 missing: 30
1169 1203 heads: 1
1170 1204 roots: 1
1171 1205 first undecided set: 1030
1172 1206 heads: 1
1173 1207 roots: 1
1174 1208 common: 1000
1175 1209 missing: 30
1176 1210 common heads: 7ead0cba2838
1177 1211
1178 1212 % -- b -> a set
1179 1213 comparing with a
1180 1214 query 1; heads
1181 1215 searching for changes
1182 1216 taking quick initial sample
1183 1217 searching: 2 queries
1184 1218 query 2; still undecided: 1029, sample size is: 11
1185 1219 sampling from both directions
1186 1220 searching: 3 queries
1187 1221 query 3; still undecided: 15, sample size is: 15
1188 1222 3 total queries in *.????s (glob)
1189 1223 elapsed time: * seconds (glob)
1224 round-trips: 3
1190 1225 heads summary:
1191 1226 total common heads: 1
1192 1227 also local heads: 0
1193 1228 also remote heads: 0
1194 1229 both: 0
1195 1230 local heads: 1
1196 1231 common: 0
1197 1232 missing: 1
1198 1233 remote heads: 1
1199 1234 common: 0
1200 1235 unknown: 1
1201 1236 local changesets: 1030
1202 1237 common: 1000
1203 1238 heads: 1
1204 1239 roots: 1
1205 1240 missing: 30
1206 1241 heads: 1
1207 1242 roots: 1
1208 1243 first undecided set: 1030
1209 1244 heads: 1
1210 1245 roots: 1
1211 1246 common: 1000
1212 1247 missing: 30
1213 1248 common heads: 7ead0cba2838
1214 1249
1215 1250 % -- b -> a set (tip only)
1216 1251 comparing with a
1217 1252 query 1; heads
1218 1253 searching for changes
1219 1254 taking quick initial sample
1220 1255 searching: 2 queries
1221 1256 query 2; still undecided: 1029, sample size is: 11
1222 1257 sampling from both directions
1223 1258 searching: 3 queries
1224 1259 query 3; still undecided: 15, sample size is: 15
1225 1260 3 total queries in *.????s (glob)
1226 1261 elapsed time: * seconds (glob)
1262 round-trips: 3
1227 1263 heads summary:
1228 1264 total common heads: 1
1229 1265 also local heads: 0
1230 1266 also remote heads: 0
1231 1267 both: 0
1232 1268 local heads: 1
1233 1269 common: 0
1234 1270 missing: 1
1235 1271 remote heads: 1
1236 1272 common: 0
1237 1273 unknown: 1
1238 1274 local changesets: 1030
1239 1275 common: 1000
1240 1276 heads: 1
1241 1277 roots: 1
1242 1278 missing: 30
1243 1279 heads: 1
1244 1280 roots: 1
1245 1281 first undecided set: 1030
1246 1282 heads: 1
1247 1283 roots: 1
1248 1284 common: 1000
1249 1285 missing: 30
1250 1286 common heads: 7ead0cba2838
1251 1287
1252 1288
1253 1289 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1254 1290
1255 1291 $ hg init manyheads
1256 1292 $ cd manyheads
1257 1293 $ echo "+300:r @a" >dagdesc
1258 1294 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1259 1295 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1260 1296 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1261 1297 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1262 1298 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1263 1299 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1264 1300 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1265 1301 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1266 1302 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1267 1303 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1268 1304 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1269 1305 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1270 1306 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1271 1307 $ echo "@b *r+3" >>dagdesc # one more head
1272 1308 $ hg debugbuilddag <dagdesc
1273 1309 reading DAG from stdin
1274 1310
1275 1311 $ hg heads -t --template . | wc -c
1276 1312 \s*261 (re)
1277 1313
1278 1314 $ hg clone -b a . a
1279 1315 adding changesets
1280 1316 adding manifests
1281 1317 adding file changes
1282 1318 added 1340 changesets with 0 changes to 0 files (+259 heads)
1283 1319 new changesets 1ea73414a91b:1c51e2c80832
1284 1320 updating to branch a
1285 1321 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1286 1322 $ hg clone -b b . b
1287 1323 adding changesets
1288 1324 adding manifests
1289 1325 adding file changes
1290 1326 added 304 changesets with 0 changes to 0 files
1291 1327 new changesets 1ea73414a91b:513314ca8b3a
1292 1328 updating to branch b
1293 1329 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1294 1330
1295 1331 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false
1296 1332 comparing with b
1297 1333 query 1; heads
1298 1334 searching for changes
1299 1335 taking quick initial sample
1300 1336 searching: 2 queries
1301 1337 query 2; still undecided: 1080, sample size is: 100
1302 1338 sampling from both directions
1303 1339 searching: 3 queries
1304 1340 query 3; still undecided: 980, sample size is: 200
1305 1341 sampling from both directions
1306 1342 searching: 4 queries
1307 1343 query 4; still undecided: 497, sample size is: 210
1308 1344 sampling from both directions
1309 1345 searching: 5 queries
1310 1346 query 5; still undecided: 285, sample size is: 220
1311 1347 sampling from both directions
1312 1348 searching: 6 queries
1313 1349 query 6; still undecided: 63, sample size is: 63
1314 1350 6 total queries in *.????s (glob)
1315 1351 elapsed time: * seconds (glob)
1352 round-trips: 6
1316 1353 heads summary:
1317 1354 total common heads: 1
1318 1355 also local heads: 0
1319 1356 also remote heads: 0
1320 1357 both: 0
1321 1358 local heads: 260
1322 1359 common: 0
1323 1360 missing: 260
1324 1361 remote heads: 1
1325 1362 common: 0
1326 1363 unknown: 1
1327 1364 local changesets: 1340
1328 1365 common: 300
1329 1366 heads: 1
1330 1367 roots: 1
1331 1368 missing: 1040
1332 1369 heads: 260
1333 1370 roots: 260
1334 1371 first undecided set: 1340
1335 1372 heads: 260
1336 1373 roots: 1
1337 1374 common: 300
1338 1375 missing: 1040
1339 1376 common heads: 3ee37d65064a
1340 1377 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1341 1378 comparing with b
1342 1379 query 1; heads
1343 1380 searching for changes
1344 1381 taking quick initial sample
1345 1382 searching: 2 queries
1346 1383 query 2; still undecided: 303, sample size is: 9
1347 1384 sampling from both directions
1348 1385 searching: 3 queries
1349 1386 query 3; still undecided: 3, sample size is: 3
1350 1387 3 total queries in *.????s (glob)
1351 1388 elapsed time: * seconds (glob)
1389 round-trips: 3
1352 1390 heads summary:
1353 1391 total common heads: 1
1354 1392 also local heads: 0
1355 1393 also remote heads: 0
1356 1394 both: 0
1357 1395 local heads: 260
1358 1396 common: 0
1359 1397 missing: 260
1360 1398 remote heads: 1
1361 1399 common: 0
1362 1400 unknown: 1
1363 1401 local changesets: 1340
1364 1402 common: 300
1365 1403 heads: 1
1366 1404 roots: 1
1367 1405 missing: 1040
1368 1406 heads: 260
1369 1407 roots: 260
1370 1408 first undecided set: 1340
1371 1409 heads: 260
1372 1410 roots: 1
1373 1411 common: 300
1374 1412 missing: 1040
1375 1413 common heads: 3ee37d65064a
1376 1414
1377 1415 Test actual protocol when pulling one new head in addition to common heads
1378 1416
1379 1417 $ hg clone -U b c
1380 1418 $ hg -R c id -ir tip
1381 1419 513314ca8b3a
1382 1420 $ hg -R c up -qr default
1383 1421 $ touch c/f
1384 1422 $ hg -R c ci -Aqm "extra head"
1385 1423 $ hg -R c id -i
1386 1424 e64a39e7da8b
1387 1425
1388 1426 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1389 1427 $ cat hg.pid >> $DAEMON_PIDS
1390 1428
1391 1429 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1392 1430 comparing with http://localhost:$HGPORT/
1393 1431 searching for changes
1394 1432 e64a39e7da8b
1395 1433
1396 1434 $ killdaemons.py
1397 1435 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1398 1436 "GET /?cmd=capabilities HTTP/1.1" 200 -
1399 1437 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1400 1438 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1401 1439 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1402 1440 $ cat errors.log
1403 1441
1404 1442 $ cd ..
1405 1443
1406 1444
1407 1445 Issue 4438 - test coverage for 3ef893520a85 issues.
1408 1446
1409 1447 $ mkdir issue4438
1410 1448 $ cd issue4438
1411 1449 #if false
1412 1450 generate new bundles:
1413 1451 $ hg init r1
1414 1452 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1415 1453 $ hg clone -q r1 r2
1416 1454 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1417 1455 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1418 1456 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1419 1457 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1420 1458 #else
1421 1459 use existing bundles:
1422 1460 $ hg init r1
1423 1461 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1424 1462 $ hg -R r1 -q up
1425 1463 $ hg init r2
1426 1464 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1427 1465 $ hg -R r2 -q up
1428 1466 #endif
1429 1467
1430 1468 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1431 1469
1432 1470 $ hg -R r1 outgoing r2 -T'{rev} '
1433 1471 comparing with r2
1434 1472 searching for changes
1435 1473 101 102 103 104 105 106 107 108 109 110 (no-eol)
1436 1474
1437 1475 The case where all the 'initialsamplesize' samples already were common would
1438 1476 give 'all remote heads known locally' without checking the remaining heads -
1439 1477 fixed in 86c35b7ae300:
1440 1478
1441 1479 $ cat >> r1/.hg/hgrc << EOF
1442 1480 > [devel]
1443 1481 > discovery.randomize = False
1444 1482 > EOF
1445 1483
1446 1484 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1447 1485 > --config blackbox.track='command commandfinish discovery'
1448 1486 comparing with r2
1449 1487 searching for changes
1450 1488 101 102 103 104 105 106 107 108 109 110 (no-eol)
1451 1489 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1452 1490 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1453 1491 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1454 1492 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1455 1493 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1456 1494 $ cd ..
1457 1495
1458 1496 Even if the set of revs to discover is restricted, unrelated revs may be
1459 1497 returned as common heads.
1460 1498
1461 1499 $ mkdir ancestorsof
1462 1500 $ cd ancestorsof
1463 1501 $ hg init a
1464 1502 $ hg clone a b -q
1465 1503 $ cd b
1466 1504 $ hg debugbuilddag '.:root *root *root'
1467 1505 $ hg log -G -T '{node|short}'
1468 1506 o fa942426a6fd
1469 1507 |
1470 1508 | o 66f7d451a68b
1471 1509 |/
1472 1510 o 1ea73414a91b
1473 1511
1474 1512 $ hg push -r 66f7d451a68b -q
1475 1513 $ hg debugdiscovery --verbose --rev fa942426a6fd
1476 1514 comparing with $TESTTMP/ancestorsof/a
1477 1515 searching for changes
1478 1516 elapsed time: * seconds (glob)
1517 round-trips: 1
1479 1518 heads summary:
1480 1519 total common heads: 1
1481 1520 also local heads: 1
1482 1521 also remote heads: 1
1483 1522 both: 1
1484 1523 local heads: 2
1485 1524 common: 1
1486 1525 missing: 1
1487 1526 remote heads: 1
1488 1527 common: 1
1489 1528 unknown: 0
1490 1529 local changesets: 3
1491 1530 common: 2
1492 1531 heads: 1
1493 1532 roots: 1
1494 1533 missing: 1
1495 1534 heads: 1
1496 1535 roots: 1
1497 1536 first undecided set: 1
1498 1537 heads: 1
1499 1538 roots: 1
1500 1539 common: 0
1501 1540 missing: 1
1502 1541 common heads: 66f7d451a68b
General Comments 0
You need to be logged in to leave comments. Login now