##// END OF EJS Templates
debugdiscovery: also integrate the discovery output in the json one...
marmoute -
r47503:67a2ecea default
parent child Browse files
Show More
@@ -1,4766 +1,4779 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 import contextlib
12 13 import difflib
13 14 import errno
14 15 import glob
15 16 import operator
16 17 import os
17 18 import platform
18 19 import random
19 20 import re
20 21 import socket
21 22 import ssl
22 23 import stat
23 24 import string
24 25 import subprocess
25 26 import sys
26 27 import time
27 28
28 29 from .i18n import _
29 30 from .node import (
30 31 bin,
31 32 hex,
32 33 nullid,
33 34 nullrev,
34 35 short,
35 36 )
36 37 from .pycompat import (
37 38 getattr,
38 39 open,
39 40 )
40 41 from . import (
41 42 bundle2,
42 43 bundlerepo,
43 44 changegroup,
44 45 cmdutil,
45 46 color,
46 47 context,
47 48 copies,
48 49 dagparser,
49 50 encoding,
50 51 error,
51 52 exchange,
52 53 extensions,
53 54 filemerge,
54 55 filesetlang,
55 56 formatter,
56 57 hg,
57 58 httppeer,
58 59 localrepo,
59 60 lock as lockmod,
60 61 logcmdutil,
61 62 mergestate as mergestatemod,
62 63 metadata,
63 64 obsolete,
64 65 obsutil,
65 66 pathutil,
66 67 phases,
67 68 policy,
68 69 pvec,
69 70 pycompat,
70 71 registrar,
71 72 repair,
72 73 repoview,
73 74 revlog,
74 75 revset,
75 76 revsetlang,
76 77 scmutil,
77 78 setdiscovery,
78 79 simplemerge,
79 80 sshpeer,
80 81 sslutil,
81 82 streamclone,
82 83 strip,
83 84 tags as tagsmod,
84 85 templater,
85 86 treediscovery,
86 87 upgrade,
87 88 url as urlmod,
88 89 util,
89 90 vfs as vfsmod,
90 91 wireprotoframing,
91 92 wireprotoserver,
92 93 wireprotov2peer,
93 94 )
94 95 from .utils import (
95 96 cborutil,
96 97 compression,
97 98 dateutil,
98 99 procutil,
99 100 stringutil,
100 101 )
101 102
102 103 from .revlogutils import (
103 104 deltas as deltautil,
104 105 nodemap,
105 106 sidedata,
106 107 )
107 108
108 109 release = lockmod.release
109 110
110 111 table = {}
111 112 table.update(strip.command._table)
112 113 command = registrar.command(table)
113 114
114 115
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    else:
        # No index file: use the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
134 135
135 136
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout this file; use a bytes literal for
    # consistency with the b'wb' mode (the previous str literal relied on
    # implicit str/bytes mixing).
    eicar_path = b'eicar-test-file.com'
    with repo.cachevfs.open(eicar_path, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    try:
        # Give an AV engine time to scan the file.
        time.sleep(2)
    finally:
        # Always remove the pseudo-malicious file, even if interrupted,
        # so it does not keep tripping the scanner after the command ends.
        util.unlink(repo.cachevfs.join(eicar_path))
151 152
152 153
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Close the bundle file when done; the previous code leaked the file
    # handle.  Mirrors how debugbundle opens its input.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
159 160
160 161
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # the command only makes sense starting from an empty repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first pass over the parsed text,
    # only used to size the progress bar and the mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: id of the most recently committed node (-1 before any commit)
        at = -1
        atbranch = b'default'
        # nodeids[i] is the changelog node committed for parsedag id i;
        # backrefs in the DAG text are resolved through this list
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # new node: data is (id, list-of-parent-ids)
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # very first node: start from the pre-built lines
                        ml = initialmergedlines
                    # tag this rev's line so each revision modifies the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file rewritten entirely at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per rev; merges also carry over
                    # the "nf*" files of the second parent
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: provide content for the files above
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node; written out at the end
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
336 337
337 338
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of the changegroup 'gen' to the ui

    With 'all' set, every delta of the changelog, manifest and filelog
    groups is listed with its nodes and delta size; otherwise only node
    ids are printed.  'indent' prefixes every output line with that many
    spaces (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one section header followed by one line per delta
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # the unbundler must be consumed in protocol order:
        # changelog, manifest, then one group per filelog
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
377 378
378 379
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        # sort markers so the output is stable across runs
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, marker)
        fm.end()
401 402
402 403
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in the binary blob 'data'

    (The previous docstring was a copy-paste from the obsmarkers helper
    and described the wrong content.)  One line per head is printed as
    "<hex node> <phase name>", each prefixed by 'indent' spaces.
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
411 412
412 413
def _quasirepr(thing):
    """Return a stable, repr-like bytes rendering of 'thing'.

    Mapping types are rendered with their keys in sorted order so the
    output does not depend on insertion order.
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
419 420
420 421
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write(b'Stream params: %s\n' % _quasirepr(gen.params))
    # an empty filter list means "show every part type"
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        header = b'%s -- %s (mandatory: %r)\n' % (
            part.type,
            _quasirepr(part.params),
            part.mandatory,
        )
        ui.write(header)
        if part.type == b'changegroup':
            # always build the unbundler (it consumes the part stream),
            # but only render it when not quiet
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
443 444
444 445
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: print the bundle's spec string and stop
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
467 468
468 469
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # the peer may hold a connection; always release it
        peer.close()
488 489
489 490
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)

    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        # no sidedata stored for this revision and --compute not given
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # the first matching category wins; "touched" is the catch-all
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
539 540
540 541
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    manifest1 = repo[parent1].manifest()
    manifest2 = repo[parent2].manifest()
    problems = 0
    # pass 1: every tracked file must be consistent with the manifests
    for filename in repo.dirstate:
        state = repo.dirstate[filename]
        in_m1 = filename in manifest1
        if state in b"nr" and not in_m1:
            ui.warn(
                _(b"%s in state %s, but not in manifest1\n") % (filename, state)
            )
            problems += 1
        if state in b"a" and in_m1:
            ui.warn(
                _(b"%s in state %s, but also in manifest1\n")
                % (filename, state)
            )
            problems += 1
        if state in b"m" and not in_m1 and filename not in manifest2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (filename, state)
            )
            problems += 1
    # pass 2: every manifest entry must be tracked with a sane state
    for filename in manifest1:
        state = repo.dirstate[filename]
        if state not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s")
                % (filename, state)
            )
            problems += 1
    if problems:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 570
570 571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 584
584 585
def _debugdisplaycolor(ui):
    """print every color label available in the current color mode"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[len(b'color.') :]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))

    # labels containing '_' sort after the rest, grouping the
    # '_background' entries together
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 602
602 603
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, column-aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label out to the width of the longest one
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616 617
617 618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqstr = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqstr)
639 640
640 641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone index file given: walk its revlog directly
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) for each node; listed revs also
            # get an ('l', (rev, label)) event so they are named rN
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no index file: emit the changelog DAG, optionally annotated
        # with branch changes ('a' events) and tag labels ('l' events)
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (branch) event whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710 711
711 712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    implicit = any(opts.get(k) for k in (b'changelog', b'manifest', b'dir'))
    if implicit:
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 728
728 729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 748
748 749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used below: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta-base rev, e[5]/e[6] parent revs
        # (inferred from the comparisons below -- confirm against the
        # revlog index entry layout)
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify which revision the delta was computed against
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta a delta-base equal to the rev itself
            # means a full snapshot; anything else deltas against prev
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # sum the compressed sizes of every revision in the delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain bases are numbered in order of first appearance
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # on-disk span from the chain base to the end of this revision
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: sum the sizes of the data hunks
            # the revlog would actually fetch for this chain
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
929 930
930 931
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated but still honored: any explicit use of it
    # (even --no-nodates) suppresses the mtime column
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        # dirstate entry tuple: (state, mode, size, mtime) -- inferred
        # from the uses below; an mtime of -1 means "unset"
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # this bit of the stored mode marks symlinks
            mode = b'lnk'
        else:
            # regular file: show permission bits, masked by the umask
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
974 975
975 976
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            "",
            'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            "",
            'use local as remote, with only these these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # normal case: talk to an actual peer (which may still be the local
        # repository when `remoteurl` points at it)

        remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
    else:
        # --remote-as-revs: fabricate a "remote" out of a filtered view of
        # the local repository that hides everything outside ::remote_revs
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        # NOTE(review): this registers a process-global repoview filter and
        # never unregisters it; acceptable for a one-shot debug command
        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same filtering trick applied to the local side
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` gathers audit information filled in by the discovery run (e.g.
    # 'total-roundtrips') plus the statistics computed below; it is handed to
    # the formatter at the end so -Tjson gets everything
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to the heads of its ancestry closure
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # For strict formats (e.g. json, template) capture whatever the
        # discovery run would print on the ui and expose it as the 'output'
        # field of the structured result instead of corrupting the format.

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing must partition the whole repository
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    # expose every statistic (and the captured output, if any) to strict
    # formatters; fm.plain below is a no-op for those formatters
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1180 1193
1181 1194
# buffer size (4 KiB) used when streaming data in debugdownload
_chunksize = 4 << 10
1183 1196
1184 1197
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched with the same proxy/authentication configuration
    Mercurial itself would use, then streamed in ``_chunksize`` chunks either
    to the file named by ``--output`` or to the ui.
    """
    fh = urlmod.open(ui, url, output)

    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in fixed-size chunks to keep memory usage bounded
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            # only close the destination we opened ourselves; `ui` stays open
            if output:
                dest.close()
    finally:
        # previously leaked: always release the source handle as well
        fh.close()
1207 1220
1208 1221
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)

    # one formatter item per loaded extension, sorted by name
    loaded = sorted(extensions.extensions(ui), key=operator.itemgetter(0))
    for name, module in loaded:
        internal = extensions.ismoduleinternal(module)

        # best-effort guess at where the extension was loaded from
        source = None
        if util.safehasattr(module, '__file__'):
            source = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            source = pycompat.sysexecutable

        if internal:
            testedwith = []  # never expose magic string to users
        else:
            testedwith = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            if internal or hgver in testedwith:
                fm.plain(b'\n')
            elif not testedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # flag the most recent version the extension claims support for
                fm.plain(b' (%s!)\n' % testedwith[-1])

        fm.condwrite(
            ui.verbose and source,
            b'source',
            _(b' location: %s\n'),
            source or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and testedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(testedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1270 1283
1271 1284
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the translation pipeline; each stage can be dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the requested intermediate trees
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        # include unknown and ignored working-directory files too
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally print every candidate file the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1367 1380
1368 1381
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name, at least as wide as the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each name so that the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # strings pass through; booleans become yes/no for plain output
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so the UI can highlight repo/config/default mismatches
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1439 1452
1440 1453
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a filesystem capability probe as yes/no
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway temp file; best effort only
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1463 1476
1464 1477
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # build the getbundle() keyword arguments from the command line
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name onto an on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1511 1524
1512 1525
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # `ignored` is the path (file or ancestor dir) that matched;
            # `ignoredata` is the (file, line number, line) of the rule
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself is ignored
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a containing directory is
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1561 1574
1562 1575
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # probe the display width of a node id from the first revision
    idlen = 12
    for probe in store:
        idlen = len(shortfn(store.node(probe)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1602 1615
1603 1616
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # one edge per parent link; the null second parent is omitted
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1622 1635
1623 1636
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the native index implementation to be fully initialized
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1633 1646
1634 1647
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks the encoding, the Python interpreter, compiled modules, templates,
    the commit editor and the username configuration, and lets extensions
    contribute their own checks.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running tally of detected problems; also the return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer build: the stdlib lives inside the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # NOTE(review): the %-formatting happens before the _() lookup here
    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extensions actually import
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; only check the binary itself
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1933 1946
1934 1947
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # make sure the peer connection is released even on error, like the
    # other peer-based commands (debugpeer, debugpushkey) do
    try:
        if not peer.capable(b'known'):
            raise error.Abort(b"known() not supported by target repository")
        flags = peer.known([bin(s) for s in ids])
        ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
    finally:
        peer.close()
1948 1961
1949 1962
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; the actual completion logic lives in
    # debugnamecomplete below
    debugnamecomplete(ui, repo, *args)
1954 1967
1955 1968
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free mode: unlink the lock file(s) without checking whether the
    # owning process is still alive (hence "DANGEROUS" in the option help)
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # set mode: acquire the requested lock(s) without blocking and hold them
    # until the user answers the prompt (or the command is interrupted); the
    # finally clause guarantees they are released on any exit path
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # report mode: describe the current holder (if any) of each lock
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired the lock ourselves, so nobody else was holding it
            l.release()
        else:
            # someone else holds the lock: describe the owner using the lock
            # file contents (best effort; the file may vanish concurrently,
            # which is why ENOENT is tolerated below)
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2067 2080
2068 2081
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache is an implementation detail of the manifest
        # storage; abort cleanly when the active storage does not have one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: drop both the in-memory and the persisted cache data
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    # --add NODE...: force the given manifests into the cache by reading them
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # no action requested: display the current cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2140 2153
2141 2154
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering of the merge state; users can
        # override it with -T/--template
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the fields emitted depend on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras attached to files that have no merge record of their own
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2249 2262
2250 2263
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect candidates from every namespace except 'branches'; branches
    # are handled separately so that only open ones are offered.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # an empty argument list means "complete everything"
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2273 2286
2274 2287
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a nodemap freshly computed from the changelog index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # some index implementations can serialize their own nodemap
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # dump the raw persisted nodemap data, if any exists on disk
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # validate the on-disk nodemap against the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # print the docket (metadata header) of the persisted nodemap
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2336 2349
2337 2350
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full-length hex node id; the node need not exist locally
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove existing markers by index
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # creation mode: record a new marker from precursor to the successors
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parent information can only come from a changeset we
                    # actually have
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted by --rev
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index must be computed against *all* markers, so iterate the
            # full set and filter display to the --rev-relevant subset
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2487 2500
2488 2501
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # print one "source -> destination" line per recorded copy
    copymap = ctx.p1copies()
    for destination in copymap:
        ui.write(b'%s -> %s\n' % (copymap[destination], destination))
2501 2514
2502 2515
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # This function was previously misnamed 'debugp1copies', which shadowed
    # the real debugp1copies defined above. The registered command name
    # (b'debugp2copies') is what users invoke, so renaming the function is
    # safe and restores access to both module-level names.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2515 2528
2516 2529
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # return (files, dirs) completions for 'path', restricted to
        # dirstate entries whose state letter is in 'acceptable'
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # the spec points outside of this repository: nothing to offer
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; on platforms with a different OS
        # separator, translate the spec here and translate matches back below
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop the completion at the next path
                # segment: offer the containing directory instead
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter options;
    # no filter means all states ('nmar', see the fallback below)
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2585 2598
2586 2599
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    # one "source -> destination" line per copy, sorted by destination
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for destination in sorted(copy_map):
        ui.write(b'%s -> %s\n' % (copy_map[destination], destination))
2600 2613
2601 2614
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer-request logging on; the output is still only shown when
    # --debug is in effect.
    logging_override = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        peer.close()
2625 2638
2626 2639
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes precedence over everything else (see list above)
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # without --debug, suppress messages emitted while
                    # picking the tool (e.g. merge-patterns warnings)
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2714 2727
2715 2728
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: print every key/value pair in the namespace
            listing = peer.listkeys(namespace)
            for k, v in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
            return None
        # update mode: conditionally move 'key' from 'old' to 'new'
        key, old, new = keyinfo
        with peer.commandexecutor() as executor:
            outcome = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        return not outcome
    finally:
        peer.close()
2751 2764
2752 2765
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvec") of two revisions

    Prints both vectors, their depths, and the relation computed
    between them (=, >, <, or |).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # initialize 'rel' so it is always defined; previously, if none of the
    # comparisons below matched, the final write raised UnboundLocalError
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2779 2792
2780 2793
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything" for dirstate.rebuild below
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # rebuild only files present in exactly one of manifest/dirstate;
            # files marked added ('a') in the dirstate are deliberately kept
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2828 2841
2829 2842
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file

    Thin wrapper: all of the actual work (scanning the store and rewriting
    the fncache) happens in repair.rebuildfncache().
    """
    repair.rebuildfncache(ui, repo)
2834 2847
2835 2848
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information

    For every file matched by PATS in the selected revision, print either
    the rename source (path and source filenode) recorded in the filelog,
    or a note that the file was not renamed.
    """
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    # use 'abspath' rather than the original 'abs', which shadowed the
    # builtin abs()
    for abspath in ctx.walk(m):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a falsy value
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abspath)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
2855 2868
2856 2869
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted for stable, comparable output
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2862 2875
2863 2876
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    By default prints aggregate statistics: revision counts, snapshot/delta
    breakdown, delta-chain shape and compression ratios.  With --dump,
    prints one raw line of index data per revision instead.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump: raw per-revision listing of index fields, then return.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # stored as a full snapshot: treat the rev as its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # running head set: parents stop being heads once seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version/flags word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the (min, max, total) triple `l` in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # Single pass over all revisions, accumulating every statistic.
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot (or empty text): starts a new chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta: extends its base's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot: classify by depth
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify by what it is stored against
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk marks its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derived aggregates used by the report below; the [2] slots are
    # converted from totals to averages here.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: column width follows the largest value.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the b'%d (%5.2f%%)' format
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # label for a chunk's one-byte compression marker
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3219 3232
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    One line per revision; columns depend on --format (0 or 1) and on
    verbosity (--verbose adds offset/length/size columns).
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hashes with --debug, short hashes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width taken from the first node; all hashes have equal length
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers for the chosen format/verbosity combination.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be tolerant of lookup failures: fall back to null parents
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not hashes
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3333 3346
3334 3347
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parsing pipeline: each stage transforms the tree produced by the
    # previous one.  Stage names are also the valid --show-stage values.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final (optimize) stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: 'showalways' unconditionally, 'showchanged'
    # only when the stage actually modified the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and compare.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Mismatch: render the difference as a unified-diff-style listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3466 3479
3467 3480
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional file handle the server will log its I/O to
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # serve over stdio until the client disconnects
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3516 3529
3517 3530
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time: DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3545 3558
3546 3559
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no file argument, so the first positional
    # argument is actually the revision.
    # Bug fix: the usage errors and openstorage below previously reported
    # b'debugdata' (copy-paste leftover) instead of this command's name.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # some storage objects wrap a revlog; unwrap to reach .sidedata()
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3573 3586
3574 3587
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Resolve the address to connect to, defaulting the port per scheme.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately off (CERT_NONE): we only
    # need the raw peer certificate, not a trusted connection.
    # NOTE(review): ssl.wrap_socket() is deprecated in newer Python
    # versions in favor of SSLContext.wrap_socket().
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building so we can report the initial state
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # second call attempts to build (fetch) the missing chain parts
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3644 3657
3645 3658
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip-backup bundle files, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # fixed values expected by bundlerepo.getremotechanges() below
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Render up to `limit` changesets from `chlist`, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # nothing to do if the node is already present in the repo
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # temporarily silence the ui while computing the incoming changesets
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: apply the first bundle containing the node
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: header with the bundle's mtime, then the
                # changesets it contains
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # always release the temporary bundle repo resources
            cleanupfn()
3781 3794
3782 3795
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state recorded in a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # substate maps subrepo path -> state tuple; index positions 0 and 1
    # hold the source URL and the pinned revision.
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3794 3807
3795 3808
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: the interpreter is only needed when this debug
    # command actually runs.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
3811 3824
3812 3825
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: full changectx repr and short node hash.
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each successors set is printed as one indented,
            # space-separated line of short hashes.
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3867 3880
3868 3881
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what is already cached, never
        # trigger a (potentially expensive) recomputation.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            tagsnodedisplay = b'missing'
        else:
            # Falsy non-None value from getfnode() — presumably the cache's
            # marker for a corrupt/invalid entry; TODO confirm against
            # hgtagsfnodescache.
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3887 3900
3888 3901
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Extra template keywords supplied via -D KEY=VALUE.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # Reject empty names and 'ui' (which would clash with the
            # template resource of the same name).
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, and the alias-expanded tree only when
        # expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3952 3965
3953 3966
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None; substitute a marker so a response line is
    # always printed.
    response = b"<default response>" if response is None else response
    ui.writenoi18n(b'response: %s\n' % response)
3968 3981
3969 3982
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returned, verbatim.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3982 3995
3983 3996
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock (in that order)
    # so cache files can be rewritten without racing other writers.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
3989 4002
3990 4003
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # --optimize may be repeated; collapse the list into a set of unique
    # optimization names before delegating to the upgrade machinery.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4040 4053
4041 4054
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Compute each repo-relative path exactly once. The previous code
    # called repo.pathto() twice per file (once for the column-width
    # measurement and again for display) and shadowed the builtin `abs`.
    relpaths = [repo.pathto(fname) for fname in items]
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypaths = [util.normpath(rel) for rel in relpaths]
    else:
        displaypaths = relpaths
    # Column widths are measured on the un-normalized paths, matching the
    # original behavior.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(rel) for rel in relpaths),
    )
    for fname, display in zip(items, displaypaths):
        line = fmt % (
            fname,
            display,
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4068 4081
4069 4082
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render the divergent changesets, if any, as "<hex> (<phase>)"
        # pairs followed by a trailing separator space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            descriptions = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(descriptions) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4087 4100
4088 4101
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the command-specific,
        # non-empty options are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4119 4132
4120 4133
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language read from file object ``fh``.

    Yields ``(action, blocklines)`` pairs: ``action`` is an unindented line
    starting a block, ``blocklines`` the indented lines belonging to it.
    Blank lines and ``#`` comments are skipped; a line indented deeper than
    its predecessor is treated as a continuation and concatenated onto it.
    """
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            # Indented content with no open block is a syntax error.
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines
4161 4174
4162 4175
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() returns a list; open() must receive the path
                    # element, not the whole list. The previous code passed
                    # the list and raised a TypeError for every BODYFILE
                    # directive.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            # Closing our write end signals EOF to the server so it can
            # flush and terminate its output.
            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,865 +1,873 b''
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Generic output formatting for Mercurial
9 9
10 10 The formatter provides API to show data in various ways. The following
11 11 functions should be used in place of ui.write():
12 12
13 13 - fm.write() for unconditional output
14 14 - fm.condwrite() to show some extra data conditionally in plain output
15 15 - fm.context() to provide changectx to template output
16 16 - fm.data() to provide extra data to JSON or template output
17 17 - fm.plain() to show raw text that isn't provided to JSON or template output
18 18
19 19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 20 beforehand so the data is converted to the appropriate data type. Use
21 21 fm.isplain() if you need to convert or format data conditionally which isn't
22 22 supported by the formatter API.
23 23
24 24 To build nested structure (i.e. a list of dicts), use fm.nested().
25 25
26 26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27 27
28 28 fm.condwrite() vs 'if cond:':
29 29
30 30 In most cases, use fm.condwrite() so users can selectively show the data
31 31 in template output. If it's costly to build data, use plain 'if cond:' with
32 32 fm.write().
33 33
34 34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35 35
36 36 fm.nested() should be used to form a tree structure (a list of dicts of
37 37 lists of dicts...) which can be accessed through template keywords, e.g.
38 38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 39 exports a dict-type object to template, which can be accessed by e.g.
40 40 "{get(foo, key)}" function.
41 41
42 42 Doctest helper:
43 43
44 44 >>> def show(fn, verbose=False, **opts):
45 45 ... import sys
46 46 ... from . import ui as uimod
47 47 ... ui = uimod.ui()
48 48 ... ui.verbose = verbose
49 49 ... ui.pushbuffer()
50 50 ... try:
51 51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 52 ... pycompat.byteskwargs(opts)))
53 53 ... finally:
54 54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55 55
56 56 Basic example:
57 57
58 58 >>> def files(ui, fm):
59 59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 60 ... for f in files:
61 61 ... fm.startitem()
62 62 ... fm.write(b'path', b'%s', f[0])
63 63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 65 ... fm.data(size=f[1])
66 66 ... fm.plain(b'\\n')
67 67 ... fm.end()
68 68 >>> show(files)
69 69 foo
70 70 bar
71 71 >>> show(files, verbose=True)
72 72 foo 1970-01-01 00:00:00
73 73 bar 1970-01-01 00:00:01
74 74 >>> show(files, template=b'json')
75 75 [
76 76 {
77 77 "date": [0, 0],
78 78 "path": "foo",
79 79 "size": 123
80 80 },
81 81 {
82 82 "date": [1, 0],
83 83 "path": "bar",
84 84 "size": 456
85 85 }
86 86 ]
87 87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 88 path: foo
89 89 date: 1970-01-01T00:00:00+00:00
90 90 path: bar
91 91 date: 1970-01-01T00:00:01+00:00
92 92
93 93 Nested example:
94 94
95 95 >>> def subrepos(ui, fm):
96 96 ... fm.startitem()
97 97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
99 99 ... fm.end()
100 100 >>> show(subrepos)
101 101 [baz]
102 102 foo
103 103 bar
104 104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 105 baz: foo, bar
106 106 """
107 107
108 108 from __future__ import absolute_import, print_function
109 109
110 110 import contextlib
111 111 import itertools
112 112 import os
113 113
114 114 from .i18n import _
115 115 from .node import (
116 116 hex,
117 117 short,
118 118 )
119 119 from .thirdparty import attr
120 120
121 121 from . import (
122 122 error,
123 123 pycompat,
124 124 templatefilters,
125 125 templatekw,
126 126 templater,
127 127 templateutil,
128 128 util,
129 129 )
130 130 from .utils import (
131 131 cborutil,
132 132 dateutil,
133 133 stringutil,
134 134 )
135 135
136 136 pickle = util.pickle
137 137
138 138
139 139 def isprintable(obj):
140 140 """Check if the given object can be directly passed in to formatter's
141 141 write() and data() functions
142 142
143 143 Returns False if the object is unsupported or must be pre-processed by
144 144 formatdate(), formatdict(), or formatlist().
145 145 """
146 146 return isinstance(obj, (type(None), bool, int, pycompat.long, float, bytes))
147 147
148 148
149 149 class _nullconverter(object):
150 150 '''convert non-primitive data types to be processed by formatter'''
151 151
152 152 # set to True if context object should be stored as item
153 153 storecontext = False
154 154
155 155 @staticmethod
156 156 def wrapnested(data, tmpl, sep):
157 157 '''wrap nested data by appropriate type'''
158 158 return data
159 159
160 160 @staticmethod
161 161 def formatdate(date, fmt):
162 162 '''convert date tuple to appropriate format'''
163 163 # timestamp can be float, but the canonical form should be int
164 164 ts, tz = date
165 165 return (int(ts), tz)
166 166
167 167 @staticmethod
168 168 def formatdict(data, key, value, fmt, sep):
169 169 '''convert dict or key-value pairs to appropriate dict format'''
170 170 # use plain dict instead of util.sortdict so that data can be
171 171 # serialized as a builtin dict in pickle output
172 172 return dict(data)
173 173
174 174 @staticmethod
175 175 def formatlist(data, name, fmt, sep):
176 176 '''convert iterable to appropriate list format'''
177 177 return list(data)
178 178
179 179
180 180 class baseformatter(object):
181
182 # set to True if the formatter outputs a strict format that does not support
183 # arbitrary output in the stream.
184 strict_format = False
185
181 186 def __init__(self, ui, topic, opts, converter):
182 187 self._ui = ui
183 188 self._topic = topic
184 189 self._opts = opts
185 190 self._converter = converter
186 191 self._item = None
187 192 # function to convert node to string suitable for this output
188 193 self.hexfunc = hex
189 194
190 195 def __enter__(self):
191 196 return self
192 197
193 198 def __exit__(self, exctype, excvalue, traceback):
194 199 if exctype is None:
195 200 self.end()
196 201
197 202 def _showitem(self):
198 203 '''show a formatted item once all data is collected'''
199 204
200 205 def startitem(self):
201 206 '''begin an item in the format list'''
202 207 if self._item is not None:
203 208 self._showitem()
204 209 self._item = {}
205 210
206 211 def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
207 212 '''convert date tuple to appropriate format'''
208 213 return self._converter.formatdate(date, fmt)
209 214
210 215 def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
211 216 '''convert dict or key-value pairs to appropriate dict format'''
212 217 return self._converter.formatdict(data, key, value, fmt, sep)
213 218
214 219 def formatlist(self, data, name, fmt=None, sep=b' '):
215 220 '''convert iterable to appropriate list format'''
216 221 # name is mandatory argument for now, but it could be optional if
217 222 # we have default template keyword, e.g. {item}
218 223 return self._converter.formatlist(data, name, fmt, sep)
219 224
220 225 def context(self, **ctxs):
221 226 '''insert context objects to be used to render template keywords'''
222 227 ctxs = pycompat.byteskwargs(ctxs)
223 228 assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
224 229 if self._converter.storecontext:
225 230 # populate missing resources in fctx -> ctx -> repo order
226 231 if b'fctx' in ctxs and b'ctx' not in ctxs:
227 232 ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
228 233 if b'ctx' in ctxs and b'repo' not in ctxs:
229 234 ctxs[b'repo'] = ctxs[b'ctx'].repo()
230 235 self._item.update(ctxs)
231 236
232 237 def datahint(self):
233 238 '''set of field names to be referenced'''
234 239 return set()
235 240
236 241 def data(self, **data):
237 242 '''insert data into item that's not shown in default output'''
238 243 data = pycompat.byteskwargs(data)
239 244 self._item.update(data)
240 245
241 246 def write(self, fields, deftext, *fielddata, **opts):
242 247 '''do default text output while assigning data to item'''
243 248 fieldkeys = fields.split()
244 249 assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
245 250 self._item.update(zip(fieldkeys, fielddata))
246 251
247 252 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
248 253 '''do conditional write (primarily for plain formatter)'''
249 254 fieldkeys = fields.split()
250 255 assert len(fieldkeys) == len(fielddata)
251 256 self._item.update(zip(fieldkeys, fielddata))
252 257
253 258 def plain(self, text, **opts):
254 259 '''show raw text for non-templated mode'''
255 260
256 261 def isplain(self):
257 262 '''check for plain formatter usage'''
258 263 return False
259 264
260 265 def nested(self, field, tmpl=None, sep=b''):
261 266 '''sub formatter to store nested data in the specified field'''
262 267 data = []
263 268 self._item[field] = self._converter.wrapnested(data, tmpl, sep)
264 269 return _nestedformatter(self._ui, self._converter, data)
265 270
266 271 def end(self):
267 272 '''end output for the formatter'''
268 273 if self._item is not None:
269 274 self._showitem()
270 275
271 276
272 277 def nullformatter(ui, topic, opts):
273 278 '''formatter that prints nothing'''
274 279 return baseformatter(ui, topic, opts, converter=_nullconverter)
275 280
276 281
277 282 class _nestedformatter(baseformatter):
278 283 '''build sub items and store them in the parent formatter'''
279 284
280 285 def __init__(self, ui, converter, data):
281 286 baseformatter.__init__(
282 287 self, ui, topic=b'', opts={}, converter=converter
283 288 )
284 289 self._data = data
285 290
286 291 def _showitem(self):
287 292 self._data.append(self._item)
288 293
289 294
290 295 def _iteritems(data):
291 296 '''iterate key-value pairs in stable order'''
292 297 if isinstance(data, dict):
293 298 return sorted(pycompat.iteritems(data))
294 299 return data
295 300
296 301
297 302 class _plainconverter(object):
298 303 '''convert non-primitive data types to text'''
299 304
300 305 storecontext = False
301 306
302 307 @staticmethod
303 308 def wrapnested(data, tmpl, sep):
304 309 raise error.ProgrammingError(b'plainformatter should never be nested')
305 310
306 311 @staticmethod
307 312 def formatdate(date, fmt):
308 313 '''stringify date tuple in the given format'''
309 314 return dateutil.datestr(date, fmt)
310 315
311 316 @staticmethod
312 317 def formatdict(data, key, value, fmt, sep):
313 318 '''stringify key-value pairs separated by sep'''
314 319 prefmt = pycompat.identity
315 320 if fmt is None:
316 321 fmt = b'%s=%s'
317 322 prefmt = pycompat.bytestr
318 323 return sep.join(
319 324 fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)
320 325 )
321 326
322 327 @staticmethod
323 328 def formatlist(data, name, fmt, sep):
324 329 '''stringify iterable separated by sep'''
325 330 prefmt = pycompat.identity
326 331 if fmt is None:
327 332 fmt = b'%s'
328 333 prefmt = pycompat.bytestr
329 334 return sep.join(fmt % prefmt(e) for e in data)
330 335
331 336
332 337 class plainformatter(baseformatter):
333 338 '''the default text output scheme'''
334 339
335 340 def __init__(self, ui, out, topic, opts):
336 341 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
337 342 if ui.debugflag:
338 343 self.hexfunc = hex
339 344 else:
340 345 self.hexfunc = short
341 346 if ui is out:
342 347 self._write = ui.write
343 348 else:
344 349 self._write = lambda s, **opts: out.write(s)
345 350
346 351 def startitem(self):
347 352 pass
348 353
349 354 def data(self, **data):
350 355 pass
351 356
352 357 def write(self, fields, deftext, *fielddata, **opts):
353 358 self._write(deftext % fielddata, **opts)
354 359
355 360 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
356 361 '''do conditional write'''
357 362 if cond:
358 363 self._write(deftext % fielddata, **opts)
359 364
360 365 def plain(self, text, **opts):
361 366 self._write(text, **opts)
362 367
363 368 def isplain(self):
364 369 return True
365 370
366 371 def nested(self, field, tmpl=None, sep=b''):
367 372 # nested data will be directly written to ui
368 373 return self
369 374
370 375 def end(self):
371 376 pass
372 377
373 378
374 379 class debugformatter(baseformatter):
375 380 def __init__(self, ui, out, topic, opts):
376 381 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
377 382 self._out = out
378 383 self._out.write(b"%s = [\n" % self._topic)
379 384
380 385 def _showitem(self):
381 386 self._out.write(
382 387 b' %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
383 388 )
384 389
385 390 def end(self):
386 391 baseformatter.end(self)
387 392 self._out.write(b"]\n")
388 393
389 394
390 395 class pickleformatter(baseformatter):
391 396 def __init__(self, ui, out, topic, opts):
392 397 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
393 398 self._out = out
394 399 self._data = []
395 400
396 401 def _showitem(self):
397 402 self._data.append(self._item)
398 403
399 404 def end(self):
400 405 baseformatter.end(self)
401 406 self._out.write(pickle.dumps(self._data))
402 407
403 408
404 409 class cborformatter(baseformatter):
405 410 '''serialize items as an indefinite-length CBOR array'''
406 411
407 412 def __init__(self, ui, out, topic, opts):
408 413 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
409 414 self._out = out
410 415 self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)
411 416
412 417 def _showitem(self):
413 418 self._out.write(b''.join(cborutil.streamencode(self._item)))
414 419
415 420 def end(self):
416 421 baseformatter.end(self)
417 422 self._out.write(cborutil.BREAK)
418 423
419 424
420 425 class jsonformatter(baseformatter):
426
427 strict_format = True
428
421 429 def __init__(self, ui, out, topic, opts):
422 430 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
423 431 self._out = out
424 432 self._out.write(b"[")
425 433 self._first = True
426 434
427 435 def _showitem(self):
428 436 if self._first:
429 437 self._first = False
430 438 else:
431 439 self._out.write(b",")
432 440
433 441 self._out.write(b"\n {\n")
434 442 first = True
435 443 for k, v in sorted(self._item.items()):
436 444 if first:
437 445 first = False
438 446 else:
439 447 self._out.write(b",\n")
440 448 u = templatefilters.json(v, paranoid=False)
441 449 self._out.write(b' "%s": %s' % (k, u))
442 450 self._out.write(b"\n }")
443 451
444 452 def end(self):
445 453 baseformatter.end(self)
446 454 self._out.write(b"\n]\n")
447 455
448 456
449 457 class _templateconverter(object):
450 458 '''convert non-primitive data types to be processed by templater'''
451 459
452 460 storecontext = True
453 461
454 462 @staticmethod
455 463 def wrapnested(data, tmpl, sep):
456 464 '''wrap nested data by templatable type'''
457 465 return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)
458 466
459 467 @staticmethod
460 468 def formatdate(date, fmt):
461 469 '''return date tuple'''
462 470 return templateutil.date(date)
463 471
464 472 @staticmethod
465 473 def formatdict(data, key, value, fmt, sep):
466 474 '''build object that can be evaluated as either plain string or dict'''
467 475 data = util.sortdict(_iteritems(data))
468 476
469 477 def f():
470 478 yield _plainconverter.formatdict(data, key, value, fmt, sep)
471 479
472 480 return templateutil.hybriddict(
473 481 data, key=key, value=value, fmt=fmt, gen=f
474 482 )
475 483
476 484 @staticmethod
477 485 def formatlist(data, name, fmt, sep):
478 486 '''build object that can be evaluated as either plain string or list'''
479 487 data = list(data)
480 488
481 489 def f():
482 490 yield _plainconverter.formatlist(data, name, fmt, sep)
483 491
484 492 return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
485 493
486 494
487 495 class templateformatter(baseformatter):
488 496 def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
489 497 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
490 498 self._out = out
491 499 self._tref = spec.ref
492 500 self._t = loadtemplater(
493 501 ui,
494 502 spec,
495 503 defaults=templatekw.keywords,
496 504 resources=templateresources(ui),
497 505 cache=templatekw.defaulttempl,
498 506 )
499 507 if overridetemplates:
500 508 self._t.cache.update(overridetemplates)
501 509 self._parts = templatepartsmap(
502 510 spec, self._t, [b'docheader', b'docfooter', b'separator']
503 511 )
504 512 self._counter = itertools.count()
505 513 self._renderitem(b'docheader', {})
506 514
507 515 def _showitem(self):
508 516 item = self._item.copy()
509 517 item[b'index'] = index = next(self._counter)
510 518 if index > 0:
511 519 self._renderitem(b'separator', {})
512 520 self._renderitem(self._tref, item)
513 521
514 522 def _renderitem(self, part, item):
515 523 if part not in self._parts:
516 524 return
517 525 ref = self._parts[part]
518 526 # None can't be put in the mapping dict since it means <unset>
519 527 for k, v in item.items():
520 528 if v is None:
521 529 item[k] = templateutil.wrappedvalue(v)
522 530 self._out.write(self._t.render(ref, item))
523 531
524 532 @util.propertycache
525 533 def _symbolsused(self):
526 534 return self._t.symbolsused(self._tref)
527 535
528 536 def datahint(self):
529 537 '''set of field names to be referenced from the template'''
530 538 return self._symbolsused[0]
531 539
532 540 def end(self):
533 541 baseformatter.end(self)
534 542 self._renderitem(b'docfooter', {})
535 543
536 544
537 545 @attr.s(frozen=True)
538 546 class templatespec(object):
539 547 ref = attr.ib()
540 548 tmpl = attr.ib()
541 549 mapfile = attr.ib()
542 550 refargs = attr.ib(default=None)
543 551 fp = attr.ib(default=None)
544 552
545 553
546 554 def empty_templatespec():
547 555 return templatespec(None, None, None)
548 556
549 557
550 558 def reference_templatespec(ref, refargs=None):
551 559 return templatespec(ref, None, None, refargs)
552 560
553 561
554 562 def literal_templatespec(tmpl):
555 563 if pycompat.ispy3:
556 564 assert not isinstance(tmpl, str), b'tmpl must not be a str'
557 565 return templatespec(b'', tmpl, None)
558 566
559 567
560 568 def mapfile_templatespec(topic, mapfile, fp=None):
561 569 return templatespec(topic, None, mapfile, fp=fp)
562 570
563 571
564 572 def lookuptemplate(ui, topic, tmpl):
565 573 """Find the template matching the given -T/--template spec 'tmpl'
566 574
567 575 'tmpl' can be any of the following:
568 576
569 577 - a literal template (e.g. '{rev}')
570 578 - a reference to built-in template (i.e. formatter)
571 579 - a map-file name or path (e.g. 'changelog')
572 580 - a reference to [templates] in config file
573 581 - a path to raw template file
574 582
575 583 A map file defines a stand-alone template environment. If a map file is
576 584 selected, all templates defined in the file will be loaded, and the
577 585 template matching the given topic will be rendered. Aliases won't be
578 586 loaded from user config, but from the map file.
579 587
580 588 If no map file is selected, all templates in [templates] section will be
581 589 available as well as aliases in [templatealias].
582 590 """
583 591
584 592 if not tmpl:
585 593 return empty_templatespec()
586 594
587 595 # looks like a literal template?
588 596 if b'{' in tmpl:
589 597 return literal_templatespec(tmpl)
590 598
591 599 # a reference to built-in (formatter) template
592 600 if tmpl in {b'cbor', b'json', b'pickle', b'debug'}:
593 601 return reference_templatespec(tmpl)
594 602
595 603 # a function-style reference to built-in template
596 604 func, fsep, ftail = tmpl.partition(b'(')
597 605 if func in {b'cbor', b'json'} and fsep and ftail.endswith(b')'):
598 606 templater.parseexpr(tmpl) # make sure syntax errors are confined
599 607 return reference_templatespec(func, refargs=ftail[:-1])
600 608
601 609 # perhaps a stock style?
602 610 if not os.path.split(tmpl)[0]:
603 611 (mapname, fp) = templater.try_open_template(
604 612 b'map-cmdline.' + tmpl
605 613 ) or templater.try_open_template(tmpl)
606 614 if mapname:
607 615 return mapfile_templatespec(topic, mapname, fp)
608 616
609 617 # perhaps it's a reference to [templates]
610 618 if ui.config(b'templates', tmpl):
611 619 return reference_templatespec(tmpl)
612 620
613 621 if tmpl == b'list':
614 622 ui.write(_(b"available styles: %s\n") % templater.stylelist())
615 623 raise error.Abort(_(b"specify a template"))
616 624
617 625 # perhaps it's a path to a map or a template
618 626 if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
619 627 # is it a mapfile for a style?
620 628 if os.path.basename(tmpl).startswith(b"map-"):
621 629 return mapfile_templatespec(topic, os.path.realpath(tmpl))
622 630 with util.posixfile(tmpl, b'rb') as f:
623 631 tmpl = f.read()
624 632 return literal_templatespec(tmpl)
625 633
626 634 # constant string?
627 635 return literal_templatespec(tmpl)
628 636
629 637
630 638 def templatepartsmap(spec, t, partnames):
631 639 """Create a mapping of {part: ref}"""
632 640 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
633 641 if spec.mapfile:
634 642 partsmap.update((p, p) for p in partnames if p in t)
635 643 elif spec.ref:
636 644 for part in partnames:
637 645 ref = b'%s:%s' % (spec.ref, part) # select config sub-section
638 646 if ref in t:
639 647 partsmap[part] = ref
640 648 return partsmap
641 649
642 650
643 651 def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
644 652 """Create a templater from either a literal template or loading from
645 653 a map file"""
646 654 assert not (spec.tmpl and spec.mapfile)
647 655 if spec.mapfile:
648 656 return templater.templater.frommapfile(
649 657 spec.mapfile,
650 658 spec.fp,
651 659 defaults=defaults,
652 660 resources=resources,
653 661 cache=cache,
654 662 )
655 663 return maketemplater(
656 664 ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
657 665 )
658 666
659 667
660 668 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
661 669 """Create a templater from a string template 'tmpl'"""
662 670 aliases = ui.configitems(b'templatealias')
663 671 t = templater.templater(
664 672 defaults=defaults, resources=resources, cache=cache, aliases=aliases
665 673 )
666 674 t.cache.update(
667 675 (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
668 676 )
669 677 if tmpl:
670 678 t.cache[b''] = tmpl
671 679 return t
672 680
673 681
674 682 # marker to denote a resource to be loaded on demand based on mapping values
675 683 # (e.g. (ctx, path) -> fctx)
676 684 _placeholder = object()
677 685
678 686
679 687 class templateresources(templater.resourcemapper):
680 688 """Resource mapper designed for the default templatekw and function"""
681 689
682 690 def __init__(self, ui, repo=None):
683 691 self._resmap = {
684 692 b'cache': {}, # for templatekw/funcs to store reusable data
685 693 b'repo': repo,
686 694 b'ui': ui,
687 695 }
688 696
689 697 def availablekeys(self, mapping):
690 698 return {
691 699 k for k in self.knownkeys() if self._getsome(mapping, k) is not None
692 700 }
693 701
694 702 def knownkeys(self):
695 703 return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}
696 704
697 705 def lookup(self, mapping, key):
698 706 if key not in self.knownkeys():
699 707 return None
700 708 v = self._getsome(mapping, key)
701 709 if v is _placeholder:
702 710 v = mapping[key] = self._loadermap[key](self, mapping)
703 711 return v
704 712
705 713 def populatemap(self, context, origmapping, newmapping):
706 714 mapping = {}
707 715 if self._hasnodespec(newmapping):
708 716 mapping[b'revcache'] = {} # per-ctx cache
709 717 if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
710 718 orignode = templateutil.runsymbol(context, origmapping, b'node')
711 719 mapping[b'originalnode'] = orignode
712 720 # put marker to override 'ctx'/'fctx' in mapping if any, and flag
713 721 # its existence to be reported by availablekeys()
714 722 if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
715 723 mapping[b'ctx'] = _placeholder
716 724 if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
717 725 mapping[b'fctx'] = _placeholder
718 726 return mapping
719 727
720 728 def _getsome(self, mapping, key):
721 729 v = mapping.get(key)
722 730 if v is not None:
723 731 return v
724 732 return self._resmap.get(key)
725 733
726 734 def _hasliteral(self, mapping, key):
727 735 """Test if a literal value is set or unset in the given mapping"""
728 736 return key in mapping and not callable(mapping[key])
729 737
730 738 def _getliteral(self, mapping, key):
731 739 """Return value of the given name if it is a literal"""
732 740 v = mapping.get(key)
733 741 if callable(v):
734 742 return None
735 743 return v
736 744
737 745 def _hasnodespec(self, mapping):
738 746 """Test if context revision is set or unset in the given mapping"""
739 747 return b'node' in mapping or b'ctx' in mapping
740 748
741 749 def _loadctx(self, mapping):
742 750 repo = self._getsome(mapping, b'repo')
743 751 node = self._getliteral(mapping, b'node')
744 752 if repo is None or node is None:
745 753 return
746 754 try:
747 755 return repo[node]
748 756 except error.RepoLookupError:
749 757 return None # maybe hidden/non-existent node
750 758
751 759 def _loadfctx(self, mapping):
752 760 ctx = self._getsome(mapping, b'ctx')
753 761 path = self._getliteral(mapping, b'path')
754 762 if ctx is None or path is None:
755 763 return None
756 764 try:
757 765 return ctx[path]
758 766 except error.LookupError:
759 767 return None # maybe removed file?
760 768
761 769 _loadermap = {
762 770 b'ctx': _loadctx,
763 771 b'fctx': _loadfctx,
764 772 }
765 773
766 774
767 775 def _internaltemplateformatter(
768 776 ui,
769 777 out,
770 778 topic,
771 779 opts,
772 780 spec,
773 781 tmpl,
774 782 docheader=b'',
775 783 docfooter=b'',
776 784 separator=b'',
777 785 ):
778 786 """Build template formatter that handles customizable built-in templates
779 787 such as -Tjson(...)"""
780 788 templates = {spec.ref: tmpl}
781 789 if docheader:
782 790 templates[b'%s:docheader' % spec.ref] = docheader
783 791 if docfooter:
784 792 templates[b'%s:docfooter' % spec.ref] = docfooter
785 793 if separator:
786 794 templates[b'%s:separator' % spec.ref] = separator
787 795 return templateformatter(
788 796 ui, out, topic, opts, spec, overridetemplates=templates
789 797 )
790 798
791 799
792 800 def formatter(ui, out, topic, opts):
793 801 spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
794 802 if spec.ref == b"cbor" and spec.refargs is not None:
795 803 return _internaltemplateformatter(
796 804 ui,
797 805 out,
798 806 topic,
799 807 opts,
800 808 spec,
801 809 tmpl=b'{dict(%s)|cbor}' % spec.refargs,
802 810 docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
803 811 docfooter=cborutil.BREAK,
804 812 )
805 813 elif spec.ref == b"cbor":
806 814 return cborformatter(ui, out, topic, opts)
807 815 elif spec.ref == b"json" and spec.refargs is not None:
808 816 return _internaltemplateformatter(
809 817 ui,
810 818 out,
811 819 topic,
812 820 opts,
813 821 spec,
814 822 tmpl=b'{dict(%s)|json}' % spec.refargs,
815 823 docheader=b'[\n ',
816 824 docfooter=b'\n]\n',
817 825 separator=b',\n ',
818 826 )
819 827 elif spec.ref == b"json":
820 828 return jsonformatter(ui, out, topic, opts)
821 829 elif spec.ref == b"pickle":
822 830 assert spec.refargs is None, r'function-style not supported'
823 831 return pickleformatter(ui, out, topic, opts)
824 832 elif spec.ref == b"debug":
825 833 assert spec.refargs is None, r'function-style not supported'
826 834 return debugformatter(ui, out, topic, opts)
827 835 elif spec.ref or spec.tmpl or spec.mapfile:
828 836 assert spec.refargs is None, r'function-style not supported'
829 837 return templateformatter(ui, out, topic, opts, spec)
830 838 # developer config: ui.formatdebug
831 839 elif ui.configbool(b'ui', b'formatdebug'):
832 840 return debugformatter(ui, out, topic, opts)
833 841 # deprecated config: ui.formatjson
834 842 elif ui.configbool(b'ui', b'formatjson'):
835 843 return jsonformatter(ui, out, topic, opts)
836 844 return plainformatter(ui, out, topic, opts)
837 845
838 846
839 847 @contextlib.contextmanager
840 848 def openformatter(ui, filename, topic, opts):
841 849 """Create a formatter that writes outputs to the specified file
842 850
843 851 Must be invoked using the 'with' statement.
844 852 """
845 853 with util.posixfile(filename, b'wb') as out:
846 854 with formatter(ui, out, topic, opts) as fm:
847 855 yield fm
848 856
849 857
850 858 @contextlib.contextmanager
851 859 def _neverending(fm):
852 860 yield fm
853 861
854 862
855 863 def maybereopen(fm, filename):
856 864 """Create a formatter backed by file if filename specified, else return
857 865 the given formatter
858 866
859 867 Must be invoked using the 'with' statement. This will never call fm.end()
860 868 of the given formatter.
861 869 """
862 870 if filename:
863 871 return openformatter(fm._ui, filename, fm._topic, fm._opts)
864 872 else:
865 873 return _neverending(fm)
@@ -1,1768 +1,1762 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 46 elapsed time: * seconds (glob)
47 47 round-trips: 2
48 48 heads summary:
49 49 total common heads: 2
50 50 also local heads: 2
51 51 also remote heads: 1
52 52 both: 1
53 53 local heads: 2
54 54 common: 2
55 55 missing: 0
56 56 remote heads: 3
57 57 common: 1
58 58 unknown: 2
59 59 local changesets: 7
60 60 common: 7
61 61 heads: 2
62 62 roots: 1
63 63 missing: 0
64 64 heads: 0
65 65 roots: 0
66 66 first undecided set: 3
67 67 heads: 1
68 68 roots: 1
69 69 common: 3
70 70 missing: 0
71 71 common heads: 01241442b3c2 b5714e113bc0
72 72
73 73 % -- a -> b set
74 74 comparing with b
75 75 query 1; heads
76 76 searching for changes
77 77 all local changesets known remotely
78 78 elapsed time: * seconds (glob)
79 79 round-trips: 1
80 80 heads summary:
81 81 total common heads: 2
82 82 also local heads: 2
83 83 also remote heads: 1
84 84 both: 1
85 85 local heads: 2
86 86 common: 2
87 87 missing: 0
88 88 remote heads: 3
89 89 common: 1
90 90 unknown: 2
91 91 local changesets: 7
92 92 common: 7
93 93 heads: 2
94 94 roots: 1
95 95 missing: 0
96 96 heads: 0
97 97 roots: 0
98 98 first undecided set: 3
99 99 heads: 1
100 100 roots: 1
101 101 common: 3
102 102 missing: 0
103 103 common heads: 01241442b3c2 b5714e113bc0
104 104
105 105 % -- a -> b set (tip only)
106 106 comparing with b
107 107 query 1; heads
108 108 searching for changes
109 109 all local changesets known remotely
110 110 elapsed time: * seconds (glob)
111 111 round-trips: 1
112 112 heads summary:
113 113 total common heads: 1
114 114 also local heads: 1
115 115 also remote heads: 0
116 116 both: 0
117 117 local heads: 2
118 118 common: 1
119 119 missing: 1
120 120 remote heads: 3
121 121 common: 0
122 122 unknown: 3
123 123 local changesets: 7
124 124 common: 6
125 125 heads: 1
126 126 roots: 1
127 127 missing: 1
128 128 heads: 1
129 129 roots: 1
130 130 first undecided set: 6
131 131 heads: 2
132 132 roots: 1
133 133 common: 5
134 134 missing: 1
135 135 common heads: b5714e113bc0
136 136
137 137 % -- b -> a tree
138 138 comparing with a
139 139 searching for changes
140 140 unpruned common: 01241442b3c2 b5714e113bc0
141 141 elapsed time: * seconds (glob)
142 142 round-trips: 1
143 143 heads summary:
144 144 total common heads: 2
145 145 also local heads: 1
146 146 also remote heads: 2
147 147 both: 1
148 148 local heads: 3
149 149 common: 1
150 150 missing: 2
151 151 remote heads: 2
152 152 common: 2
153 153 unknown: 0
154 154 local changesets: 15
155 155 common: 7
156 156 heads: 2
157 157 roots: 1
158 158 missing: 8
159 159 heads: 2
160 160 roots: 2
161 161 first undecided set: 8
162 162 heads: 2
163 163 roots: 2
164 164 common: 0
165 165 missing: 8
166 166 common heads: 01241442b3c2 b5714e113bc0
167 167
168 168 % -- b -> a set
169 169 comparing with a
170 170 query 1; heads
171 171 searching for changes
172 172 all remote heads known locally
173 173 elapsed time: * seconds (glob)
174 174 round-trips: 1
175 175 heads summary:
176 176 total common heads: 2
177 177 also local heads: 1
178 178 also remote heads: 2
179 179 both: 1
180 180 local heads: 3
181 181 common: 1
182 182 missing: 2
183 183 remote heads: 2
184 184 common: 2
185 185 unknown: 0
186 186 local changesets: 15
187 187 common: 7
188 188 heads: 2
189 189 roots: 1
190 190 missing: 8
191 191 heads: 2
192 192 roots: 2
193 193 first undecided set: 8
194 194 heads: 2
195 195 roots: 2
196 196 common: 0
197 197 missing: 8
198 198 common heads: 01241442b3c2 b5714e113bc0
199 199
200 200 % -- b -> a set (tip only)
201 201 comparing with a
202 202 query 1; heads
203 203 searching for changes
204 204 all remote heads known locally
205 205 elapsed time: * seconds (glob)
206 206 round-trips: 1
207 207 heads summary:
208 208 total common heads: 2
209 209 also local heads: 1
210 210 also remote heads: 2
211 211 both: 1
212 212 local heads: 3
213 213 common: 1
214 214 missing: 2
215 215 remote heads: 2
216 216 common: 2
217 217 unknown: 0
218 218 local changesets: 15
219 219 common: 7
220 220 heads: 2
221 221 roots: 1
222 222 missing: 8
223 223 heads: 2
224 224 roots: 2
225 225 first undecided set: 8
226 226 heads: 2
227 227 roots: 2
228 228 common: 0
229 229 missing: 8
230 230 common heads: 01241442b3c2 b5714e113bc0
231 231
232 232
233 233 Many new:
234 234
235 235 $ testdesc '-ra1 -ra2' '-rb' '
236 236 > +2:f +3:a1 +3:b
237 237 > <f +30 :a2'
238 238
239 239 % -- a -> b tree
240 240 comparing with b
241 241 searching for changes
242 242 unpruned common: bebd167eb94d
243 243 elapsed time: * seconds (glob)
244 244 round-trips: 2
245 245 heads summary:
246 246 total common heads: 1
247 247 also local heads: 1
248 248 also remote heads: 0
249 249 both: 0
250 250 local heads: 2
251 251 common: 1
252 252 missing: 1
253 253 remote heads: 1
254 254 common: 0
255 255 unknown: 1
256 256 local changesets: 35
257 257 common: 5
258 258 heads: 1
259 259 roots: 1
260 260 missing: 30
261 261 heads: 1
262 262 roots: 1
263 263 first undecided set: 34
264 264 heads: 2
265 265 roots: 1
266 266 common: 4
267 267 missing: 30
268 268 common heads: bebd167eb94d
269 269
270 270 % -- a -> b set
271 271 comparing with b
272 272 query 1; heads
273 273 searching for changes
274 274 taking initial sample
275 275 searching: 2 queries
276 276 query 2; still undecided: 29, sample size is: 29
277 277 2 total queries in *.????s (glob)
278 278 elapsed time: * seconds (glob)
279 279 round-trips: 2
280 280 heads summary:
281 281 total common heads: 1
282 282 also local heads: 1
283 283 also remote heads: 0
284 284 both: 0
285 285 local heads: 2
286 286 common: 1
287 287 missing: 1
288 288 remote heads: 1
289 289 common: 0
290 290 unknown: 1
291 291 local changesets: 35
292 292 common: 5
293 293 heads: 1
294 294 roots: 1
295 295 missing: 30
296 296 heads: 1
297 297 roots: 1
298 298 first undecided set: 34
299 299 heads: 2
300 300 roots: 1
301 301 common: 4
302 302 missing: 30
303 303 common heads: bebd167eb94d
304 304
305 305 % -- a -> b set (tip only)
306 306 comparing with b
307 307 query 1; heads
308 308 searching for changes
309 309 taking quick initial sample
310 310 searching: 2 queries
311 311 query 2; still undecided: 31, sample size is: 31
312 312 2 total queries in *.????s (glob)
313 313 elapsed time: * seconds (glob)
314 314 round-trips: 2
315 315 heads summary:
316 316 total common heads: 1
317 317 also local heads: 0
318 318 also remote heads: 0
319 319 both: 0
320 320 local heads: 2
321 321 common: 0
322 322 missing: 2
323 323 remote heads: 1
324 324 common: 0
325 325 unknown: 1
326 326 local changesets: 35
327 327 common: 2
328 328 heads: 1
329 329 roots: 1
330 330 missing: 33
331 331 heads: 2
332 332 roots: 2
333 333 first undecided set: 35
334 334 heads: 2
335 335 roots: 1
336 336 common: 2
337 337 missing: 33
338 338 common heads: 66f7d451a68b
339 339
340 340 % -- b -> a tree
341 341 comparing with a
342 342 searching for changes
343 343 unpruned common: 66f7d451a68b bebd167eb94d
344 344 elapsed time: * seconds (glob)
345 345 round-trips: 4
346 346 heads summary:
347 347 total common heads: 1
348 348 also local heads: 0
349 349 also remote heads: 1
350 350 both: 0
351 351 local heads: 1
352 352 common: 0
353 353 missing: 1
354 354 remote heads: 2
355 355 common: 1
356 356 unknown: 1
357 357 local changesets: 8
358 358 common: 5
359 359 heads: 1
360 360 roots: 1
361 361 missing: 3
362 362 heads: 1
363 363 roots: 1
364 364 first undecided set: 3
365 365 heads: 1
366 366 roots: 1
367 367 common: 0
368 368 missing: 3
369 369 common heads: bebd167eb94d
370 370
371 371 % -- b -> a set
372 372 comparing with a
373 373 query 1; heads
374 374 searching for changes
375 375 taking initial sample
376 376 searching: 2 queries
377 377 query 2; still undecided: 2, sample size is: 2
378 378 2 total queries in *.????s (glob)
379 379 elapsed time: * seconds (glob)
380 380 round-trips: 2
381 381 heads summary:
382 382 total common heads: 1
383 383 also local heads: 0
384 384 also remote heads: 1
385 385 both: 0
386 386 local heads: 1
387 387 common: 0
388 388 missing: 1
389 389 remote heads: 2
390 390 common: 1
391 391 unknown: 1
392 392 local changesets: 8
393 393 common: 5
394 394 heads: 1
395 395 roots: 1
396 396 missing: 3
397 397 heads: 1
398 398 roots: 1
399 399 first undecided set: 3
400 400 heads: 1
401 401 roots: 1
402 402 common: 0
403 403 missing: 3
404 404 common heads: bebd167eb94d
405 405
406 406 % -- b -> a set (tip only)
407 407 comparing with a
408 408 query 1; heads
409 409 searching for changes
410 410 taking initial sample
411 411 searching: 2 queries
412 412 query 2; still undecided: 2, sample size is: 2
413 413 2 total queries in *.????s (glob)
414 414 elapsed time: * seconds (glob)
415 415 round-trips: 2
416 416 heads summary:
417 417 total common heads: 1
418 418 also local heads: 0
419 419 also remote heads: 1
420 420 both: 0
421 421 local heads: 1
422 422 common: 0
423 423 missing: 1
424 424 remote heads: 2
425 425 common: 1
426 426 unknown: 1
427 427 local changesets: 8
428 428 common: 5
429 429 heads: 1
430 430 roots: 1
431 431 missing: 3
432 432 heads: 1
433 433 roots: 1
434 434 first undecided set: 3
435 435 heads: 1
436 436 roots: 1
437 437 common: 0
438 438 missing: 3
439 439 common heads: bebd167eb94d
440 440
441 441 Both sides many new with stub:
442 442
443 443 $ testdesc '-ra1 -ra2' '-rb' '
444 444 > +2:f +2:a1 +30 :b
445 445 > <f +30 :a2'
446 446
447 447 % -- a -> b tree
448 448 comparing with b
449 449 searching for changes
450 450 unpruned common: 2dc09a01254d
451 451 elapsed time: * seconds (glob)
452 452 round-trips: 4
453 453 heads summary:
454 454 total common heads: 1
455 455 also local heads: 1
456 456 also remote heads: 0
457 457 both: 0
458 458 local heads: 2
459 459 common: 1
460 460 missing: 1
461 461 remote heads: 1
462 462 common: 0
463 463 unknown: 1
464 464 local changesets: 34
465 465 common: 4
466 466 heads: 1
467 467 roots: 1
468 468 missing: 30
469 469 heads: 1
470 470 roots: 1
471 471 first undecided set: 33
472 472 heads: 2
473 473 roots: 1
474 474 common: 3
475 475 missing: 30
476 476 common heads: 2dc09a01254d
477 477
478 478 % -- a -> b set
479 479 comparing with b
480 480 query 1; heads
481 481 searching for changes
482 482 taking initial sample
483 483 searching: 2 queries
484 484 query 2; still undecided: 29, sample size is: 29
485 485 2 total queries in *.????s (glob)
486 486 elapsed time: * seconds (glob)
487 487 round-trips: 2
488 488 heads summary:
489 489 total common heads: 1
490 490 also local heads: 1
491 491 also remote heads: 0
492 492 both: 0
493 493 local heads: 2
494 494 common: 1
495 495 missing: 1
496 496 remote heads: 1
497 497 common: 0
498 498 unknown: 1
499 499 local changesets: 34
500 500 common: 4
501 501 heads: 1
502 502 roots: 1
503 503 missing: 30
504 504 heads: 1
505 505 roots: 1
506 506 first undecided set: 33
507 507 heads: 2
508 508 roots: 1
509 509 common: 3
510 510 missing: 30
511 511 common heads: 2dc09a01254d
512 512
513 513 % -- a -> b set (tip only)
514 514 comparing with b
515 515 query 1; heads
516 516 searching for changes
517 517 taking quick initial sample
518 518 searching: 2 queries
519 519 query 2; still undecided: 31, sample size is: 31
520 520 2 total queries in *.????s (glob)
521 521 elapsed time: * seconds (glob)
522 522 round-trips: 2
523 523 heads summary:
524 524 total common heads: 1
525 525 also local heads: 0
526 526 also remote heads: 0
527 527 both: 0
528 528 local heads: 2
529 529 common: 0
530 530 missing: 2
531 531 remote heads: 1
532 532 common: 0
533 533 unknown: 1
534 534 local changesets: 34
535 535 common: 2
536 536 heads: 1
537 537 roots: 1
538 538 missing: 32
539 539 heads: 2
540 540 roots: 2
541 541 first undecided set: 34
542 542 heads: 2
543 543 roots: 1
544 544 common: 2
545 545 missing: 32
546 546 common heads: 66f7d451a68b
547 547
548 548 % -- b -> a tree
549 549 comparing with a
550 550 searching for changes
551 551 unpruned common: 2dc09a01254d 66f7d451a68b
552 552 elapsed time: * seconds (glob)
553 553 round-trips: 4
554 554 heads summary:
555 555 total common heads: 1
556 556 also local heads: 0
557 557 also remote heads: 1
558 558 both: 0
559 559 local heads: 1
560 560 common: 0
561 561 missing: 1
562 562 remote heads: 2
563 563 common: 1
564 564 unknown: 1
565 565 local changesets: 34
566 566 common: 4
567 567 heads: 1
568 568 roots: 1
569 569 missing: 30
570 570 heads: 1
571 571 roots: 1
572 572 first undecided set: 30
573 573 heads: 1
574 574 roots: 1
575 575 common: 0
576 576 missing: 30
577 577 common heads: 2dc09a01254d
578 578
579 579 % -- b -> a set
580 580 comparing with a
581 581 query 1; heads
582 582 searching for changes
583 583 taking initial sample
584 584 searching: 2 queries
585 585 query 2; still undecided: 29, sample size is: 29
586 586 2 total queries in *.????s (glob)
587 587 elapsed time: * seconds (glob)
588 588 round-trips: 2
589 589 heads summary:
590 590 total common heads: 1
591 591 also local heads: 0
592 592 also remote heads: 1
593 593 both: 0
594 594 local heads: 1
595 595 common: 0
596 596 missing: 1
597 597 remote heads: 2
598 598 common: 1
599 599 unknown: 1
600 600 local changesets: 34
601 601 common: 4
602 602 heads: 1
603 603 roots: 1
604 604 missing: 30
605 605 heads: 1
606 606 roots: 1
607 607 first undecided set: 30
608 608 heads: 1
609 609 roots: 1
610 610 common: 0
611 611 missing: 30
612 612 common heads: 2dc09a01254d
613 613
614 614 % -- b -> a set (tip only)
615 615 comparing with a
616 616 query 1; heads
617 617 searching for changes
618 618 taking initial sample
619 619 searching: 2 queries
620 620 query 2; still undecided: 29, sample size is: 29
621 621 2 total queries in *.????s (glob)
622 622 elapsed time: * seconds (glob)
623 623 round-trips: 2
624 624 heads summary:
625 625 total common heads: 1
626 626 also local heads: 0
627 627 also remote heads: 1
628 628 both: 0
629 629 local heads: 1
630 630 common: 0
631 631 missing: 1
632 632 remote heads: 2
633 633 common: 1
634 634 unknown: 1
635 635 local changesets: 34
636 636 common: 4
637 637 heads: 1
638 638 roots: 1
639 639 missing: 30
640 640 heads: 1
641 641 roots: 1
642 642 first undecided set: 30
643 643 heads: 1
644 644 roots: 1
645 645 common: 0
646 646 missing: 30
647 647 common heads: 2dc09a01254d
648 648
649 649
650 650 Both many new:
651 651
652 652 $ testdesc '-ra' '-rb' '
653 653 > +2:f +30 :b
654 654 > <f +30 :a'
655 655
656 656 % -- a -> b tree
657 657 comparing with b
658 658 searching for changes
659 659 unpruned common: 66f7d451a68b
660 660 elapsed time: * seconds (glob)
661 661 round-trips: 4
662 662 heads summary:
663 663 total common heads: 1
664 664 also local heads: 0
665 665 also remote heads: 0
666 666 both: 0
667 667 local heads: 1
668 668 common: 0
669 669 missing: 1
670 670 remote heads: 1
671 671 common: 0
672 672 unknown: 1
673 673 local changesets: 32
674 674 common: 2
675 675 heads: 1
676 676 roots: 1
677 677 missing: 30
678 678 heads: 1
679 679 roots: 1
680 680 first undecided set: 32
681 681 heads: 1
682 682 roots: 1
683 683 common: 2
684 684 missing: 30
685 685 common heads: 66f7d451a68b
686 686
687 687 % -- a -> b set
688 688 comparing with b
689 689 query 1; heads
690 690 searching for changes
691 691 taking quick initial sample
692 692 searching: 2 queries
693 693 query 2; still undecided: 31, sample size is: 31
694 694 2 total queries in *.????s (glob)
695 695 elapsed time: * seconds (glob)
696 696 round-trips: 2
697 697 heads summary:
698 698 total common heads: 1
699 699 also local heads: 0
700 700 also remote heads: 0
701 701 both: 0
702 702 local heads: 1
703 703 common: 0
704 704 missing: 1
705 705 remote heads: 1
706 706 common: 0
707 707 unknown: 1
708 708 local changesets: 32
709 709 common: 2
710 710 heads: 1
711 711 roots: 1
712 712 missing: 30
713 713 heads: 1
714 714 roots: 1
715 715 first undecided set: 32
716 716 heads: 1
717 717 roots: 1
718 718 common: 2
719 719 missing: 30
720 720 common heads: 66f7d451a68b
721 721
722 722 % -- a -> b set (tip only)
723 723 comparing with b
724 724 query 1; heads
725 725 searching for changes
726 726 taking quick initial sample
727 727 searching: 2 queries
728 728 query 2; still undecided: 31, sample size is: 31
729 729 2 total queries in *.????s (glob)
730 730 elapsed time: * seconds (glob)
731 731 round-trips: 2
732 732 heads summary:
733 733 total common heads: 1
734 734 also local heads: 0
735 735 also remote heads: 0
736 736 both: 0
737 737 local heads: 1
738 738 common: 0
739 739 missing: 1
740 740 remote heads: 1
741 741 common: 0
742 742 unknown: 1
743 743 local changesets: 32
744 744 common: 2
745 745 heads: 1
746 746 roots: 1
747 747 missing: 30
748 748 heads: 1
749 749 roots: 1
750 750 first undecided set: 32
751 751 heads: 1
752 752 roots: 1
753 753 common: 2
754 754 missing: 30
755 755 common heads: 66f7d451a68b
756 756
757 757 % -- b -> a tree
758 758 comparing with a
759 759 searching for changes
760 760 unpruned common: 66f7d451a68b
761 761 elapsed time: * seconds (glob)
762 762 round-trips: 4
763 763 heads summary:
764 764 total common heads: 1
765 765 also local heads: 0
766 766 also remote heads: 0
767 767 both: 0
768 768 local heads: 1
769 769 common: 0
770 770 missing: 1
771 771 remote heads: 1
772 772 common: 0
773 773 unknown: 1
774 774 local changesets: 32
775 775 common: 2
776 776 heads: 1
777 777 roots: 1
778 778 missing: 30
779 779 heads: 1
780 780 roots: 1
781 781 first undecided set: 32
782 782 heads: 1
783 783 roots: 1
784 784 common: 2
785 785 missing: 30
786 786 common heads: 66f7d451a68b
787 787
788 788 % -- b -> a set
789 789 comparing with a
790 790 query 1; heads
791 791 searching for changes
792 792 taking quick initial sample
793 793 searching: 2 queries
794 794 query 2; still undecided: 31, sample size is: 31
795 795 2 total queries in *.????s (glob)
796 796 elapsed time: * seconds (glob)
797 797 round-trips: 2
798 798 heads summary:
799 799 total common heads: 1
800 800 also local heads: 0
801 801 also remote heads: 0
802 802 both: 0
803 803 local heads: 1
804 804 common: 0
805 805 missing: 1
806 806 remote heads: 1
807 807 common: 0
808 808 unknown: 1
809 809 local changesets: 32
810 810 common: 2
811 811 heads: 1
812 812 roots: 1
813 813 missing: 30
814 814 heads: 1
815 815 roots: 1
816 816 first undecided set: 32
817 817 heads: 1
818 818 roots: 1
819 819 common: 2
820 820 missing: 30
821 821 common heads: 66f7d451a68b
822 822
823 823 % -- b -> a set (tip only)
824 824 comparing with a
825 825 query 1; heads
826 826 searching for changes
827 827 taking quick initial sample
828 828 searching: 2 queries
829 829 query 2; still undecided: 31, sample size is: 31
830 830 2 total queries in *.????s (glob)
831 831 elapsed time: * seconds (glob)
832 832 round-trips: 2
833 833 heads summary:
834 834 total common heads: 1
835 835 also local heads: 0
836 836 also remote heads: 0
837 837 both: 0
838 838 local heads: 1
839 839 common: 0
840 840 missing: 1
841 841 remote heads: 1
842 842 common: 0
843 843 unknown: 1
844 844 local changesets: 32
845 845 common: 2
846 846 heads: 1
847 847 roots: 1
848 848 missing: 30
849 849 heads: 1
850 850 roots: 1
851 851 first undecided set: 32
852 852 heads: 1
853 853 roots: 1
854 854 common: 2
855 855 missing: 30
856 856 common heads: 66f7d451a68b
857 857
858 858
859 859 Both many new skewed:
860 860
861 861 $ testdesc '-ra' '-rb' '
862 862 > +2:f +30 :b
863 863 > <f +50 :a'
864 864
865 865 % -- a -> b tree
866 866 comparing with b
867 867 searching for changes
868 868 unpruned common: 66f7d451a68b
869 869 elapsed time: * seconds (glob)
870 870 round-trips: 4
871 871 heads summary:
872 872 total common heads: 1
873 873 also local heads: 0
874 874 also remote heads: 0
875 875 both: 0
876 876 local heads: 1
877 877 common: 0
878 878 missing: 1
879 879 remote heads: 1
880 880 common: 0
881 881 unknown: 1
882 882 local changesets: 52
883 883 common: 2
884 884 heads: 1
885 885 roots: 1
886 886 missing: 50
887 887 heads: 1
888 888 roots: 1
889 889 first undecided set: 52
890 890 heads: 1
891 891 roots: 1
892 892 common: 2
893 893 missing: 50
894 894 common heads: 66f7d451a68b
895 895
896 896 % -- a -> b set
897 897 comparing with b
898 898 query 1; heads
899 899 searching for changes
900 900 taking quick initial sample
901 901 searching: 2 queries
902 902 query 2; still undecided: 51, sample size is: 51
903 903 2 total queries in *.????s (glob)
904 904 elapsed time: * seconds (glob)
905 905 round-trips: 2
906 906 heads summary:
907 907 total common heads: 1
908 908 also local heads: 0
909 909 also remote heads: 0
910 910 both: 0
911 911 local heads: 1
912 912 common: 0
913 913 missing: 1
914 914 remote heads: 1
915 915 common: 0
916 916 unknown: 1
917 917 local changesets: 52
918 918 common: 2
919 919 heads: 1
920 920 roots: 1
921 921 missing: 50
922 922 heads: 1
923 923 roots: 1
924 924 first undecided set: 52
925 925 heads: 1
926 926 roots: 1
927 927 common: 2
928 928 missing: 50
929 929 common heads: 66f7d451a68b
930 930
931 931 % -- a -> b set (tip only)
932 932 comparing with b
933 933 query 1; heads
934 934 searching for changes
935 935 taking quick initial sample
936 936 searching: 2 queries
937 937 query 2; still undecided: 51, sample size is: 51
938 938 2 total queries in *.????s (glob)
939 939 elapsed time: * seconds (glob)
940 940 round-trips: 2
941 941 heads summary:
942 942 total common heads: 1
943 943 also local heads: 0
944 944 also remote heads: 0
945 945 both: 0
946 946 local heads: 1
947 947 common: 0
948 948 missing: 1
949 949 remote heads: 1
950 950 common: 0
951 951 unknown: 1
952 952 local changesets: 52
953 953 common: 2
954 954 heads: 1
955 955 roots: 1
956 956 missing: 50
957 957 heads: 1
958 958 roots: 1
959 959 first undecided set: 52
960 960 heads: 1
961 961 roots: 1
962 962 common: 2
963 963 missing: 50
964 964 common heads: 66f7d451a68b
965 965
966 966 % -- b -> a tree
967 967 comparing with a
968 968 searching for changes
969 969 unpruned common: 66f7d451a68b
970 970 elapsed time: * seconds (glob)
971 971 round-trips: 3
972 972 heads summary:
973 973 total common heads: 1
974 974 also local heads: 0
975 975 also remote heads: 0
976 976 both: 0
977 977 local heads: 1
978 978 common: 0
979 979 missing: 1
980 980 remote heads: 1
981 981 common: 0
982 982 unknown: 1
983 983 local changesets: 32
984 984 common: 2
985 985 heads: 1
986 986 roots: 1
987 987 missing: 30
988 988 heads: 1
989 989 roots: 1
990 990 first undecided set: 32
991 991 heads: 1
992 992 roots: 1
993 993 common: 2
994 994 missing: 30
995 995 common heads: 66f7d451a68b
996 996
997 997 % -- b -> a set
998 998 comparing with a
999 999 query 1; heads
1000 1000 searching for changes
1001 1001 taking quick initial sample
1002 1002 searching: 2 queries
1003 1003 query 2; still undecided: 31, sample size is: 31
1004 1004 2 total queries in *.????s (glob)
1005 1005 elapsed time: * seconds (glob)
1006 1006 round-trips: 2
1007 1007 heads summary:
1008 1008 total common heads: 1
1009 1009 also local heads: 0
1010 1010 also remote heads: 0
1011 1011 both: 0
1012 1012 local heads: 1
1013 1013 common: 0
1014 1014 missing: 1
1015 1015 remote heads: 1
1016 1016 common: 0
1017 1017 unknown: 1
1018 1018 local changesets: 32
1019 1019 common: 2
1020 1020 heads: 1
1021 1021 roots: 1
1022 1022 missing: 30
1023 1023 heads: 1
1024 1024 roots: 1
1025 1025 first undecided set: 32
1026 1026 heads: 1
1027 1027 roots: 1
1028 1028 common: 2
1029 1029 missing: 30
1030 1030 common heads: 66f7d451a68b
1031 1031
1032 1032 % -- b -> a set (tip only)
1033 1033 comparing with a
1034 1034 query 1; heads
1035 1035 searching for changes
1036 1036 taking quick initial sample
1037 1037 searching: 2 queries
1038 1038 query 2; still undecided: 31, sample size is: 31
1039 1039 2 total queries in *.????s (glob)
1040 1040 elapsed time: * seconds (glob)
1041 1041 round-trips: 2
1042 1042 heads summary:
1043 1043 total common heads: 1
1044 1044 also local heads: 0
1045 1045 also remote heads: 0
1046 1046 both: 0
1047 1047 local heads: 1
1048 1048 common: 0
1049 1049 missing: 1
1050 1050 remote heads: 1
1051 1051 common: 0
1052 1052 unknown: 1
1053 1053 local changesets: 32
1054 1054 common: 2
1055 1055 heads: 1
1056 1056 roots: 1
1057 1057 missing: 30
1058 1058 heads: 1
1059 1059 roots: 1
1060 1060 first undecided set: 32
1061 1061 heads: 1
1062 1062 roots: 1
1063 1063 common: 2
1064 1064 missing: 30
1065 1065 common heads: 66f7d451a68b
1066 1066
1067 1067
1068 1068 Both many new on top of long history:
1069 1069
1070 1070 $ testdesc '-ra' '-rb' '
1071 1071 > +1000:f +30 :b
1072 1072 > <f +50 :a'
1073 1073
1074 1074 % -- a -> b tree
1075 1075 comparing with b
1076 1076 searching for changes
1077 1077 unpruned common: 7ead0cba2838
1078 1078 elapsed time: * seconds (glob)
1079 1079 round-trips: 4
1080 1080 heads summary:
1081 1081 total common heads: 1
1082 1082 also local heads: 0
1083 1083 also remote heads: 0
1084 1084 both: 0
1085 1085 local heads: 1
1086 1086 common: 0
1087 1087 missing: 1
1088 1088 remote heads: 1
1089 1089 common: 0
1090 1090 unknown: 1
1091 1091 local changesets: 1050
1092 1092 common: 1000
1093 1093 heads: 1
1094 1094 roots: 1
1095 1095 missing: 50
1096 1096 heads: 1
1097 1097 roots: 1
1098 1098 first undecided set: 1050
1099 1099 heads: 1
1100 1100 roots: 1
1101 1101 common: 1000
1102 1102 missing: 50
1103 1103 common heads: 7ead0cba2838
1104 1104
1105 1105 % -- a -> b set
1106 1106 comparing with b
1107 1107 query 1; heads
1108 1108 searching for changes
1109 1109 taking quick initial sample
1110 1110 searching: 2 queries
1111 1111 query 2; still undecided: 1049, sample size is: 11
1112 1112 sampling from both directions
1113 1113 searching: 3 queries
1114 1114 query 3; still undecided: 31, sample size is: 31
1115 1115 3 total queries in *.????s (glob)
1116 1116 elapsed time: * seconds (glob)
1117 1117 round-trips: 3
1118 1118 heads summary:
1119 1119 total common heads: 1
1120 1120 also local heads: 0
1121 1121 also remote heads: 0
1122 1122 both: 0
1123 1123 local heads: 1
1124 1124 common: 0
1125 1125 missing: 1
1126 1126 remote heads: 1
1127 1127 common: 0
1128 1128 unknown: 1
1129 1129 local changesets: 1050
1130 1130 common: 1000
1131 1131 heads: 1
1132 1132 roots: 1
1133 1133 missing: 50
1134 1134 heads: 1
1135 1135 roots: 1
1136 1136 first undecided set: 1050
1137 1137 heads: 1
1138 1138 roots: 1
1139 1139 common: 1000
1140 1140 missing: 50
1141 1141 common heads: 7ead0cba2838
1142 1142
1143 1143 % -- a -> b set (tip only)
1144 1144 comparing with b
1145 1145 query 1; heads
1146 1146 searching for changes
1147 1147 taking quick initial sample
1148 1148 searching: 2 queries
1149 1149 query 2; still undecided: 1049, sample size is: 11
1150 1150 sampling from both directions
1151 1151 searching: 3 queries
1152 1152 query 3; still undecided: 31, sample size is: 31
1153 1153 3 total queries in *.????s (glob)
1154 1154 elapsed time: * seconds (glob)
1155 1155 round-trips: 3
1156 1156 heads summary:
1157 1157 total common heads: 1
1158 1158 also local heads: 0
1159 1159 also remote heads: 0
1160 1160 both: 0
1161 1161 local heads: 1
1162 1162 common: 0
1163 1163 missing: 1
1164 1164 remote heads: 1
1165 1165 common: 0
1166 1166 unknown: 1
1167 1167 local changesets: 1050
1168 1168 common: 1000
1169 1169 heads: 1
1170 1170 roots: 1
1171 1171 missing: 50
1172 1172 heads: 1
1173 1173 roots: 1
1174 1174 first undecided set: 1050
1175 1175 heads: 1
1176 1176 roots: 1
1177 1177 common: 1000
1178 1178 missing: 50
1179 1179 common heads: 7ead0cba2838
1180 1180
1181 1181 % -- b -> a tree
1182 1182 comparing with a
1183 1183 searching for changes
1184 1184 unpruned common: 7ead0cba2838
1185 1185 elapsed time: * seconds (glob)
1186 1186 round-trips: 3
1187 1187 heads summary:
1188 1188 total common heads: 1
1189 1189 also local heads: 0
1190 1190 also remote heads: 0
1191 1191 both: 0
1192 1192 local heads: 1
1193 1193 common: 0
1194 1194 missing: 1
1195 1195 remote heads: 1
1196 1196 common: 0
1197 1197 unknown: 1
1198 1198 local changesets: 1030
1199 1199 common: 1000
1200 1200 heads: 1
1201 1201 roots: 1
1202 1202 missing: 30
1203 1203 heads: 1
1204 1204 roots: 1
1205 1205 first undecided set: 1030
1206 1206 heads: 1
1207 1207 roots: 1
1208 1208 common: 1000
1209 1209 missing: 30
1210 1210 common heads: 7ead0cba2838
1211 1211
1212 1212 % -- b -> a set
1213 1213 comparing with a
1214 1214 query 1; heads
1215 1215 searching for changes
1216 1216 taking quick initial sample
1217 1217 searching: 2 queries
1218 1218 query 2; still undecided: 1029, sample size is: 11
1219 1219 sampling from both directions
1220 1220 searching: 3 queries
1221 1221 query 3; still undecided: 15, sample size is: 15
1222 1222 3 total queries in *.????s (glob)
1223 1223 elapsed time: * seconds (glob)
1224 1224 round-trips: 3
1225 1225 heads summary:
1226 1226 total common heads: 1
1227 1227 also local heads: 0
1228 1228 also remote heads: 0
1229 1229 both: 0
1230 1230 local heads: 1
1231 1231 common: 0
1232 1232 missing: 1
1233 1233 remote heads: 1
1234 1234 common: 0
1235 1235 unknown: 1
1236 1236 local changesets: 1030
1237 1237 common: 1000
1238 1238 heads: 1
1239 1239 roots: 1
1240 1240 missing: 30
1241 1241 heads: 1
1242 1242 roots: 1
1243 1243 first undecided set: 1030
1244 1244 heads: 1
1245 1245 roots: 1
1246 1246 common: 1000
1247 1247 missing: 30
1248 1248 common heads: 7ead0cba2838
1249 1249
1250 1250 % -- b -> a set (tip only)
1251 1251 comparing with a
1252 1252 query 1; heads
1253 1253 searching for changes
1254 1254 taking quick initial sample
1255 1255 searching: 2 queries
1256 1256 query 2; still undecided: 1029, sample size is: 11
1257 1257 sampling from both directions
1258 1258 searching: 3 queries
1259 1259 query 3; still undecided: 15, sample size is: 15
1260 1260 3 total queries in *.????s (glob)
1261 1261 elapsed time: * seconds (glob)
1262 1262 round-trips: 3
1263 1263 heads summary:
1264 1264 total common heads: 1
1265 1265 also local heads: 0
1266 1266 also remote heads: 0
1267 1267 both: 0
1268 1268 local heads: 1
1269 1269 common: 0
1270 1270 missing: 1
1271 1271 remote heads: 1
1272 1272 common: 0
1273 1273 unknown: 1
1274 1274 local changesets: 1030
1275 1275 common: 1000
1276 1276 heads: 1
1277 1277 roots: 1
1278 1278 missing: 30
1279 1279 heads: 1
1280 1280 roots: 1
1281 1281 first undecided set: 1030
1282 1282 heads: 1
1283 1283 roots: 1
1284 1284 common: 1000
1285 1285 missing: 30
1286 1286 common heads: 7ead0cba2838
1287 1287
1288 1288
1289 1289 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1290 1290
1291 1291 $ hg init manyheads
1292 1292 $ cd manyheads
1293 1293 $ echo "+300:r @a" >dagdesc
1294 1294 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1295 1295 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1296 1296 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1297 1297 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1298 1298 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1299 1299 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1300 1300 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1301 1301 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1302 1302 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1303 1303 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1304 1304 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1305 1305 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1306 1306 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1307 1307 $ echo "@b *r+3" >>dagdesc # one more head
1308 1308 $ hg debugbuilddag <dagdesc
1309 1309 reading DAG from stdin
1310 1310
1311 1311 $ hg heads -t --template . | wc -c
1312 1312 \s*261 (re)
1313 1313
1314 1314 $ hg clone -b a . a
1315 1315 adding changesets
1316 1316 adding manifests
1317 1317 adding file changes
1318 1318 added 1340 changesets with 0 changes to 0 files (+259 heads)
1319 1319 new changesets 1ea73414a91b:1c51e2c80832
1320 1320 updating to branch a
1321 1321 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1322 1322 $ hg clone -b b . b
1323 1323 adding changesets
1324 1324 adding manifests
1325 1325 adding file changes
1326 1326 added 304 changesets with 0 changes to 0 files
1327 1327 new changesets 1ea73414a91b:513314ca8b3a
1328 1328 updating to branch b
1329 1329 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1330 1330
1331 1331 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1332 1332 comparing with b
1333 1333 query 1; heads
1334 1334 searching for changes
1335 1335 taking quick initial sample
1336 1336 searching: 2 queries
1337 1337 query 2; still undecided: 1080, sample size is: 50
1338 1338 sampling from both directions
1339 1339 searching: 3 queries
1340 1340 query 3; still undecided: 1030, sample size is: 200
1341 1341 sampling from both directions
1342 1342 searching: 4 queries
1343 1343 query 4; still undecided: 547, sample size is: 210
1344 1344 sampling from both directions
1345 1345 searching: 5 queries
1346 1346 query 5; still undecided: 336, sample size is: 220
1347 1347 sampling from both directions
1348 1348 searching: 6 queries
1349 1349 query 6; still undecided: 114, sample size is: 114
1350 1350 6 total queries in *.????s (glob)
1351 1351 elapsed time: * seconds (glob)
1352 1352 round-trips: 6
1353 1353 heads summary:
1354 1354 total common heads: 1
1355 1355 also local heads: 0
1356 1356 also remote heads: 0
1357 1357 both: 0
1358 1358 local heads: 260
1359 1359 common: 0
1360 1360 missing: 260
1361 1361 remote heads: 1
1362 1362 common: 0
1363 1363 unknown: 1
1364 1364 local changesets: 1340
1365 1365 common: 300
1366 1366 heads: 1
1367 1367 roots: 1
1368 1368 missing: 1040
1369 1369 heads: 260
1370 1370 roots: 260
1371 1371 first undecided set: 1340
1372 1372 heads: 260
1373 1373 roots: 1
1374 1374 common: 300
1375 1375 missing: 1040
1376 1376 common heads: 3ee37d65064a
1377 1377 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1378 1378 comparing with b
1379 1379 query 1; heads
1380 1380 searching for changes
1381 1381 taking quick initial sample
1382 1382 searching: 2 queries
1383 1383 query 2; still undecided: 303, sample size is: 9
1384 1384 sampling from both directions
1385 1385 searching: 3 queries
1386 1386 query 3; still undecided: 3, sample size is: 3
1387 1387 3 total queries in *.????s (glob)
1388 1388 elapsed time: * seconds (glob)
1389 1389 round-trips: 3
1390 1390 heads summary:
1391 1391 total common heads: 1
1392 1392 also local heads: 0
1393 1393 also remote heads: 0
1394 1394 both: 0
1395 1395 local heads: 260
1396 1396 common: 0
1397 1397 missing: 260
1398 1398 remote heads: 1
1399 1399 common: 0
1400 1400 unknown: 1
1401 1401 local changesets: 1340
1402 1402 common: 300
1403 1403 heads: 1
1404 1404 roots: 1
1405 1405 missing: 1040
1406 1406 heads: 260
1407 1407 roots: 260
1408 1408 first undecided set: 1340
1409 1409 heads: 260
1410 1410 roots: 1
1411 1411 common: 300
1412 1412 missing: 1040
1413 1413 common heads: 3ee37d65064a
1414 1414
1415 1415 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1416 1416 comparing with b
1417 1417 searching for changes
1418 1418 sampling from both directions
1419 1419 query 1; still undecided: 1340, sample size is: 50
1420 1420 sampling from both directions
1421 1421 query 2; still undecided: 995, sample size is: 60
1422 1422 sampling from both directions
1423 1423 query 3; still undecided: 913, sample size is: 72
1424 1424 sampling from both directions
1425 1425 query 4; still undecided: 816, sample size is: 204
1426 1426 sampling from both directions
1427 1427 query 5; still undecided: 612, sample size is: 153
1428 1428 sampling from both directions
1429 1429 query 6; still undecided: 456, sample size is: 123
1430 1430 sampling from both directions
1431 1431 query 7; still undecided: 332, sample size is: 147
1432 1432 sampling from both directions
1433 1433 query 8; still undecided: 184, sample size is: 176
1434 1434 sampling from both directions
1435 1435 query 9; still undecided: 8, sample size is: 8
1436 1436 9 total queries in *s (glob)
1437 1437 elapsed time: * seconds (glob)
1438 1438 round-trips: 9
1439 1439 heads summary:
1440 1440 total common heads: 1
1441 1441 also local heads: 0
1442 1442 also remote heads: 0
1443 1443 both: 0
1444 1444 local heads: 260
1445 1445 common: 0
1446 1446 missing: 260
1447 1447 remote heads: 1
1448 1448 common: 0
1449 1449 unknown: 1
1450 1450 local changesets: 1340
1451 1451 common: 300
1452 1452 heads: 1
1453 1453 roots: 1
1454 1454 missing: 1040
1455 1455 heads: 260
1456 1456 roots: 260
1457 1457 first undecided set: 1340
1458 1458 heads: 260
1459 1459 roots: 1
1460 1460 common: 300
1461 1461 missing: 1040
1462 1462 common heads: 3ee37d65064a
1463 1463
1464 1464 Test actual protocol when pulling one new head in addition to common heads
1465 1465
1466 1466 $ hg clone -U b c
1467 1467 $ hg -R c id -ir tip
1468 1468 513314ca8b3a
1469 1469 $ hg -R c up -qr default
1470 1470 $ touch c/f
1471 1471 $ hg -R c ci -Aqm "extra head"
1472 1472 $ hg -R c id -i
1473 1473 e64a39e7da8b
1474 1474
1475 1475 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1476 1476 $ cat hg.pid >> $DAEMON_PIDS
1477 1477
1478 1478 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1479 1479 comparing with http://localhost:$HGPORT/
1480 1480 searching for changes
1481 1481 e64a39e7da8b
1482 1482
1483 1483 $ killdaemons.py
1484 1484 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1485 1485 "GET /?cmd=capabilities HTTP/1.1" 200 -
1486 1486 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1487 1487 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1488 1488 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1489 1489 $ cat errors.log
1490 1490
1491 1491 $ cd ..
1492 1492
1493 1493
1494 1494 Issue 4438 - test coverage for 3ef893520a85 issues.
1495 1495
1496 1496 $ mkdir issue4438
1497 1497 $ cd issue4438
1498 1498 #if false
1499 1499 generate new bundles:
1500 1500 $ hg init r1
1501 1501 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1502 1502 $ hg clone -q r1 r2
1503 1503 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1504 1504 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1505 1505 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1506 1506 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1507 1507 #else
1508 1508 use existing bundles:
1509 1509 $ hg init r1
1510 1510 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1511 1511 $ hg -R r1 -q up
1512 1512 $ hg init r2
1513 1513 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1514 1514 $ hg -R r2 -q up
1515 1515 #endif
1516 1516
1517 1517 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1518 1518
1519 1519 $ hg -R r1 outgoing r2 -T'{rev} '
1520 1520 comparing with r2
1521 1521 searching for changes
1522 1522 101 102 103 104 105 106 107 108 109 110 (no-eol)
1523 1523
1524 1524 The case where all the 'initialsamplesize' samples already were common would
1525 1525 give 'all remote heads known locally' without checking the remaining heads -
1526 1526 fixed in 86c35b7ae300:
1527 1527
1528 1528 $ cat >> r1/.hg/hgrc << EOF
1529 1529 > [devel]
1530 1530 > discovery.randomize = False
1531 1531 > EOF
1532 1532
1533 1533 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1534 1534 > --config blackbox.track='command commandfinish discovery'
1535 1535 comparing with r2
1536 1536 searching for changes
1537 1537 101 102 103 104 105 106 107 108 109 110 (no-eol)
1538 1538 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1539 1539 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1540 1540 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1541 1541 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1542 1542 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1543 1543 $ cd ..
1544 1544
1545 1545 Even if the set of revs to discover is restricted, unrelated revs may be
1546 1546 returned as common heads.
1547 1547
1548 1548 $ mkdir ancestorsof
1549 1549 $ cd ancestorsof
1550 1550 $ hg init a
1551 1551 $ hg clone a b -q
1552 1552 $ cd b
1553 1553 $ hg debugbuilddag '.:root *root *root'
1554 1554 $ hg log -G -T '{node|short}'
1555 1555 o fa942426a6fd
1556 1556 |
1557 1557 | o 66f7d451a68b
1558 1558 |/
1559 1559 o 1ea73414a91b
1560 1560
1561 1561 $ hg push -r 66f7d451a68b -q
1562 1562 $ hg debugdiscovery --verbose --rev fa942426a6fd
1563 1563 comparing with $TESTTMP/ancestorsof/a
1564 1564 searching for changes
1565 1565 elapsed time: * seconds (glob)
1566 1566 round-trips: 1
1567 1567 heads summary:
1568 1568 total common heads: 1
1569 1569 also local heads: 1
1570 1570 also remote heads: 1
1571 1571 both: 1
1572 1572 local heads: 2
1573 1573 common: 1
1574 1574 missing: 1
1575 1575 remote heads: 1
1576 1576 common: 1
1577 1577 unknown: 0
1578 1578 local changesets: 3
1579 1579 common: 2
1580 1580 heads: 1
1581 1581 roots: 1
1582 1582 missing: 1
1583 1583 heads: 1
1584 1584 roots: 1
1585 1585 first undecided set: 1
1586 1586 heads: 1
1587 1587 roots: 1
1588 1588 common: 0
1589 1589 missing: 1
1590 1590 common heads: 66f7d451a68b
1591 1591
1592 1592 $ cd ..
1593 1593
1594 1594
1595 1595 Test debuging discovery using different subset of the same repository
1596 1596 =====================================================================
1597 1597
1598 1598 remote is a local subset
1599 1599 ------------------------
1600 1600
1601 1601 remote will be last 25 heads of the local graph
1602 1602
1603 1603 $ cd $TESTTMP/manyheads
1604 1604 $ hg -R a debugdiscovery \
1605 1605 > --debug \
1606 1606 > --remote-as-revs 'last(heads(all()), 25)' \
1607 1607 > --config devel.discovery.randomize=false
1608 1608 query 1; heads
1609 1609 searching for changes
1610 1610 all remote heads known locally
1611 1611 elapsed time: * seconds (glob)
1612 1612 round-trips: 1
1613 1613 heads summary:
1614 1614 total common heads: 25
1615 1615 also local heads: 25
1616 1616 also remote heads: 25
1617 1617 both: 25
1618 1618 local heads: 260
1619 1619 common: 25
1620 1620 missing: 235
1621 1621 remote heads: 25
1622 1622 common: 25
1623 1623 unknown: 0
1624 1624 local changesets: 1340
1625 1625 common: 400
1626 1626 heads: 25
1627 1627 roots: 1
1628 1628 missing: 940
1629 1629 heads: 235
1630 1630 roots: 235
1631 1631 first undecided set: 940
1632 1632 heads: 235
1633 1633 roots: 235
1634 1634 common: 0
1635 1635 missing: 940
1636 1636 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1637 1637
1638 1638 local is a local subset
1639 1639 ------------------------
1640 1640
1641 1641 remote will be last 25 heads of the local graph
1642 1642
1643 1643 $ cd $TESTTMP/manyheads
1644 1644 $ hg -R a debugdiscovery b \
1645 1645 > --debug \
1646 1646 > --local-as-revs 'first(heads(all()), 25)' \
1647 1647 > --config devel.discovery.randomize=false
1648 1648 comparing with b
1649 1649 query 1; heads
1650 1650 searching for changes
1651 1651 taking quick initial sample
1652 1652 query 2; still undecided: 375, sample size is: 81
1653 1653 sampling from both directions
1654 1654 query 3; still undecided: 3, sample size is: 3
1655 1655 3 total queries *s (glob)
1656 1656 elapsed time: * seconds (glob)
1657 1657 round-trips: 3
1658 1658 heads summary:
1659 1659 total common heads: 1
1660 1660 also local heads: 0
1661 1661 also remote heads: 0
1662 1662 both: 0
1663 1663 local heads: 25
1664 1664 common: 0
1665 1665 missing: 25
1666 1666 remote heads: 1
1667 1667 common: 0
1668 1668 unknown: 1
1669 1669 local changesets: 400
1670 1670 common: 300
1671 1671 heads: 1
1672 1672 roots: 1
1673 1673 missing: 100
1674 1674 heads: 25
1675 1675 roots: 25
1676 1676 first undecided set: 400
1677 1677 heads: 25
1678 1678 roots: 1
1679 1679 common: 300
1680 1680 missing: 100
1681 1681 common heads: 3ee37d65064a
1682 1682
1683 1683 both local and remove are subset
1684 1684 ------------------------
1685 1685
1686 1686 remote will be last 25 heads of the local graph
1687 1687
1688 1688 $ cd $TESTTMP/manyheads
1689 1689 $ hg -R a debugdiscovery \
1690 1690 > --debug \
1691 1691 > --local-as-revs 'first(heads(all()), 25)' \
1692 1692 > --remote-as-revs 'last(heads(all()), 25)' \
1693 1693 > --config devel.discovery.randomize=false
1694 1694 query 1; heads
1695 1695 searching for changes
1696 1696 taking quick initial sample
1697 1697 query 2; still undecided: 375, sample size is: 81
1698 1698 sampling from both directions
1699 1699 query 3; still undecided: 3, sample size is: 3
1700 1700 3 total queries in *s (glob)
1701 1701 elapsed time: * seconds (glob)
1702 1702 round-trips: 3
1703 1703 heads summary:
1704 1704 total common heads: 1
1705 1705 also local heads: 0
1706 1706 also remote heads: 0
1707 1707 both: 0
1708 1708 local heads: 25
1709 1709 common: 0
1710 1710 missing: 25
1711 1711 remote heads: 25
1712 1712 common: 0
1713 1713 unknown: 25
1714 1714 local changesets: 400
1715 1715 common: 300
1716 1716 heads: 1
1717 1717 roots: 1
1718 1718 missing: 100
1719 1719 heads: 25
1720 1720 roots: 25
1721 1721 first undecided set: 400
1722 1722 heads: 25
1723 1723 roots: 1
1724 1724 common: 300
1725 1725 missing: 100
1726 1726 common heads: 3ee37d65064a
1727 1727
1728 1728 Test -T json output
1729 1729 -------------------
1730 1730
1731 1731 $ hg -R a debugdiscovery \
1732 1732 > -T json \
1733 1733 > --debug \
1734 1734 > --local-as-revs 'first(heads(all()), 25)' \
1735 1735 > --remote-as-revs 'last(heads(all()), 25)' \
1736 1736 > --config devel.discovery.randomize=false
1737 query 1; heads
1738 searching for changes
1739 taking quick initial sample
1740 query 2; still undecided: 375, sample size is: 81
1741 sampling from both directions
1742 query 3; still undecided: 3, sample size is: 3
1743 3 total queries in *s (glob)
1744 1737 [
1745 1738 {
1746 1739 "elapsed": *, (glob)
1747 1740 "nb-common-heads": 1,
1748 1741 "nb-common-heads-both": 0,
1749 1742 "nb-common-heads-local": 0,
1750 1743 "nb-common-heads-remote": 0,
1751 1744 "nb-common-roots": 1,
1752 1745 "nb-head-local": 25,
1753 1746 "nb-head-local-missing": 25,
1754 1747 "nb-head-remote": 25,
1755 1748 "nb-head-remote-unknown": 25,
1756 1749 "nb-ini_und": 400,
1757 1750 "nb-ini_und-common": 300,
1758 1751 "nb-ini_und-heads": 25,
1759 1752 "nb-ini_und-missing": 100,
1760 1753 "nb-ini_und-roots": 1,
1761 1754 "nb-missing-heads": 25,
1762 1755 "nb-missing-roots": 25,
1763 1756 "nb-revs": 400,
1764 1757 "nb-revs-common": 300,
1765 1758 "nb-revs-missing": 100,
1759 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1766 1760 "total-roundtrips": 3
1767 1761 }
1768 1762 ]
General Comments 0
You need to be logged in to leave comments. Login now