##// END OF EJS Templates
debugindex: move the logic into its own module...
marmoute -
r50145:61cf3d39 default
parent child Browse files
Show More
@@ -0,0 +1,51
1 # revlogutils/debug.py - utility used for revlog debuging
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2022 Octobus <contact@octobus.net>
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 from .. import (
10 node as nodemod,
11 )
12
13
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog"""
    # Choose how node ids are rendered: full hex or the short form.
    hexfn = nodemod.hex if full_node else nodemod.short

    # Column width for node ids: default fits the short form; measure the
    # first revision (if any) so full-node output lines up too.
    idlen = 12
    for first_rev in revlog:
        idlen = len(hexfn(revlog.node(first_rev)))
        break

    fm = formatter

    header = b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in revlog:
        node = revlog.node(rev)
        p1_node, p2_node = revlog.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', revlog.linkrev(rev))
        fm.write(b'node', b'%s ', hexfn(node))
        fm.write(b'p1', b'%s ', hexfn(p1_node))
        fm.write(b'p2', b'%s', hexfn(p2_node))
        fm.plain(b'\n')

    fm.end()
@@ -1,5051 +1,5032
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 debug as revlog_debug,
108 109 deltas as deltautil,
109 110 nodemap,
110 111 rewrite,
111 112 sidedata,
112 113 )
113 114
114 115 release = lockmod.release
115 116
116 117 table = {}
117 118 table.update(strip.command._table)
118 119 command = registrar.command(table)
119 120
120 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Accept either (index-file, rev1, rev2) against an on-disk revlog, or
    # (rev1, rev2) against the changelog of the current repository.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        store = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), index
        )
        lookup = store.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor_node = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor_node), hex(ancestor_node)))
140 141
141 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # NOTE(review): the path literal is a str while Mercurial vfs paths are
    # conventionally bytes -- confirm cachevfs.open/join accept str here.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # If an AV scanner quarantined or deleted the file, this unlink (or the
    # write above) is where a failure would surface.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
157 158
158 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle file, parse its header, and apply it to the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
165 166
166 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first pass over the parsed text,
    # used to size the progress bar and the mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second pass: actually create the commits inside a single transaction
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file content
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this revision's line so every rev changes "mf"
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry over the second parent's "nf*" files so the
                        # merge commit keeps them
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve backrefs into parent nodeids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag element: remember it for .hg/localtags
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
352 353
353 354
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup unbundler ``gen``

    With ``all`` set, every delta of the changelog, manifest and each
    filelog chunk group is listed; otherwise only changelog node ids are
    printed.  ``indent`` prefixes each output line (used when nested
    inside bundle2 output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print a header, then one line per delta in the current group
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # the stream must be consumed in order: changelog, manifest, then
        # one group per file until the empty {} terminator
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
393 394
394 395
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # report the unknown format instead of crashing
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(blob))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(blob)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        mark = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, mark)
    fm.end()
417 418
418 419
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    # print every head, grouped by phase, one per line
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
427 428
428 429
def _quasirepr(thing):
    """Return a repr-like bytes rendering of ``thing``.

    Mappings are rendered with their keys sorted so output is
    deterministic; anything else falls back to ``repr``.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
435 436
436 437
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when given
        if wanted and part.type not in wanted:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # the unbundler is created unconditionally; its content is only
            # dumped when not quiet
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
459 460
460 461
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec and stop
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
483 484
484 485
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # plain wire-protocol capabilities first
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        # then bundle2 capabilities, if the peer advertises any
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        peer.close()
504 505
505 506
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # Gather per-file change information: recompute from the changeset with
    # --compute, otherwise decode it from the stored sidedata (if present).
    changes = None
    if opts['compute']:
        changes = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            changes = metadata.decode_files_sidedata(sd)
    if changes is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for fname in sorted(changes.touched):
        if fname in changes.added:
            action = b"added"
        elif fname in changes.removed:
            action = b"removed"
        elif fname in changes.merged:
            action = b"merged"
        elif fname in changes.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if fname in changes.copied_from_p1:
            copy_parent = b"p1"
            copy_source = changes.copied_from_p1[fname]
        elif fname in changes.copied_from_p2:
            copy_parent = b"p2"
            copy_source = changes.copied_from_p2[fname]

        ui.write(template % (action, copy_parent, fname, copy_source))
555 556
556 557
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # each verify() item is a (format, args...) tuple describing one problem
    nerrors = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        nerrors += 1
    if nerrors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
570 571
571 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # dispatch to the style listing or the color listing
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
584 585
585 586
def _debugdisplaycolor(ui):
    """list every available color/effect name, each rendered as itself"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # names containing '_' (e.g. '_background') sort after the others so
    # those entries stay grouped together
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
602 603
603 604
def _debugdisplaystyle(ui):
    """list every configured style with the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    colwidth = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # pad so the effect lists line up, each effect self-labeled
            ui.write(b': ')
            ui.write(b' ' * (max(0, colwidth - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
617 618
618 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
640 641
641 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index file: emit its DAG, labeling any revs
        # listed on the command line as "rN"
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, [parents])) nodes, plus ('l', ...) labels
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # changelog of the current repository, optionally annotated with
        # tags (as labels) and branch changes (as annotations)
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the named
                    # branch changes between consecutive revisions
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
711 712
712 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # -c/-m/--dir select a well-known revlog; the positional argument then
    # carries the revision instead of a file path.
    if any(opts.get(key) for key in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 729
729 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # also report whether the parsed date falls inside RANGE
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
748 749
749 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Collect per-revision delta statistics from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta; order matters (p1/p2 checked before the
        # skip1/skip2 fallbacks, snapshots before plain "prev").
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains are numbered in order of first appearance of their base
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous revision
            prevrev = -1

        # guard the ratios against division by zero
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the whole chain to measure how much
            # data would actually be fetched from disk
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
990 991
991 992
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision uses the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument: it is the revision and the revlog is selected
    # with -c/-m.  Two arguments: the first one is a file path.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # debug_search=True makes the delta computer report every candidate it
    # considers through ui.write, which is the whole point of this command.
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=True,
    )

    # Rebuild the inputs exactly as they would exist at addrevision time,
    # starting from the stored full text (no cached delta, original flags).
    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    btext = [revlog.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    # NOTE(review): the data file handle is not explicitly closed here —
    # presumably released with the revlog; confirm before relying on it.
    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1049 1050
1050 1051
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (metadata) instead of the
        # per-file entries; only dirstate-v2 has a docket.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # the deprecated --nodates flag overrides --dates when explicitly set
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so entries with equal mtimes keep a
            # deterministic order
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: show 'lnk' instead of a permission triplet
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1138 1139
1139 1140
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records the ignore-pattern hash; it is the trailing
    # SHA-1 digest of the docket's tree metadata blob.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1154 1155
1155 1156
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situation.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (or default-path) remote peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository, hiding everything outside the requested revs
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: restrict the local side the same way
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: buffer any stray textual output during the
        # run so it does not corrupt the formatted stream; it is reported as
        # the 'output' field instead.

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing must partition the repository
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1410 1411
1411 1412
# read/write buffer size (4 KiB) used when streaming data in debugdownload
_chunksize = 4 << 10
1414 1415
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The content is streamed to the ui (stdout) or, with --output, written to
    the given file in _chunksize pieces to keep memory usage bounded.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Close the response handle too (the previous code leaked it); the
        # ui object must not be closed, only an explicit output file.
        fh.close()
        if output:
            dest.close()
1437 1438
1438 1439
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized (single-binary) builds have
        # no __file__, so point at the executable instead
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with compatibility info
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1500 1501
1501 1502
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the pipeline of fileset-language transformations; --show-stage names
    # must match the first element of these pairs
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the expression through each stage, dumping the tree when requested
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1597 1598
1598 1599
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report only scans, so it cannot be combined with repair options
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in revlogutils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1671 1672
1672 1673
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name (at least the header width)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad every name to the common column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # render booleans as yes/no for human output, keep strings as-is
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # highlight mismatches between the repo, the config and the default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1743 1744
1744 1745
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    # probe case sensitivity with a throwaway temp file; any OS error (e.g.
    # unwritable path) leaves the answer as '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1767 1768
1768 1769
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing --type value to the on-disk bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1815 1816
1816 1817
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then each parent directory --
                # a file can be ignored because a containing dir is ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1865 1866
1866 1867
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    # resolve -c/-m/FILE to the changelog, manifest, or a filelog store
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)
    # the rendering logic was moved into revlogutils.debug; --debug makes
    # it print full node hashes instead of short ones
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=store,
        full_node=ui.debugflag,
    )
1907 1888
1908 1889 @command(
1909 1890 b'debugindexdot',
1910 1891 cmdutil.debugrevlogopts,
1911 1892 _(b'-c|-m|FILE'),
1912 1893 optionalrepo=True,
1913 1894 )
1914 1895 def debugindexdot(ui, repo, file_=None, **opts):
1915 1896 """dump an index DAG as a graphviz dot file"""
1916 1897 opts = pycompat.byteskwargs(opts)
1917 1898 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1918 1899 ui.writenoi18n(b"digraph G {\n")
1919 1900 for i in r:
1920 1901 node = r.node(i)
1921 1902 pp = r.parents(node)
1922 1903 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1923 1904 if pp[1] != repo.nullid:
1924 1905 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1925 1906 ui.write(b"}\n")
1926 1907
1927 1908
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index via shortest() first — presumably this forces it to
    # be loaded before stats are collected (TODO confirm)
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # only native (non pure-Python) index implementations expose stats()
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key, value in sorted(stats.items()):
        ui.write(b'%s: %d\n' % (key, value))
1937 1918
1938 1919
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of environment checks (encoding, Python, compiled
    extensions, compression engines, templates, editor, username) and
    reports each result through the formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of detected issues; also the return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen/oxidized builds have no os.__file__; fall back to the
        # executable path
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extension modules actually import
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the fallback default, so a missing 'vi' gets a different
    # message than a missing user-configured editor
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions run their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2237 2218
2238 2219
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # query the peer once with every node, then print one digit per answer
    nodes = [bin(s) for s in ids]
    known = repo.known(nodes)
    digits = [b"1" if flag else b"0" for flag in known]
    ui.write(b"%s\n" % b"".join(digits))
2252 2233
2253 2234
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # historical alias: forward directly to the modern implementation
    debugnamecomplete(ui, repo, *args)
2258 2239
2259 2240
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: just delete the lock files and exit
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire and hold until interrupted
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always release whatever we managed to acquire
        release(*locks)

    # default mode: report the current state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """print the state of one lock file; return 1 if held, else 0"""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished: treat as free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2383 2364
2384 2365
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        """return the fulltext cache of the manifest storage, or abort"""
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # no option given: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2458 2439
2459 2440
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default text rendering of the nested commits/files/extras items
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local and other)
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that are no longer part of the merge state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2567 2548
2568 2549
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for namespace_name, ns in repo.names.items():
        if namespace_name == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # no argument means "complete the empty prefix", i.e. list everything
    prefixes = list(args) or [b'']
    matched = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matched)))
    ui.write(b'\n')
2591 2572
2592 2573
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # dump the raw persisted nodemap bytes, if any exist
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): divides by data_length — presumably a persisted
            # docket always has data_length > 0; confirm, else this can
            # raise ZeroDivisionError
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2654 2635
2655 2636
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """convert a full hex node id to binary, or raise InputError"""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker inside a transaction
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list (a subset of) the markers
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2805 2786
2806 2787
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # one "source -> destination" line per copy recorded against p1
    copies = ctx.p1copies()
    for dest, source in copies.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2819 2800
2820 2801
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # This function was previously also named `debugp1copies`, which
    # silently shadowed the p1 variant defined just above at module
    # level (command dispatch still worked because @command registers
    # the byte-string name, but the module attribute was clobbered).
    # Renamed to match the command it implements.

    opts = pycompat.byteskwargs(opts)
    # Default to the working-directory context when no --rev is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2833 2814
2834 2815
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): dirstate paths matching `path` whose
        # dirstate state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Reject specs that escape the repository root entirely.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make spec relative to the repo root, dirstate-style.
        spec = spec[len(rootdir) :]
        # The dirstate always stores '/'-separated paths; on platforms
        # with a different separator we must convert in both directions.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop runs over the whole
        # dirstate.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate at the next path separator so
                # completion advances one segment at a time.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags:
    # --normal selects 'n'/'m', --added 'a', --removed 'r'. With no
    # flag at all, every state ('nmar') is accepted below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2903 2884
2904 2885
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, compute the copy map between them, and
    # print "source -> destination" pairs ordered by destination path.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    copy_map = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dst in sorted(copy_map):
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2918 2899
2919 2900
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(can_push))
    finally:
        # Always release the peer connection, even if probing failed.
        peer.close()
2943 2924
2944 2925
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

    FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is propagated to the picker by temporarily forcing
    # ui.forcemerge, exactly as the real merge machinery would see it.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v, echo the inputs that can short-circuit pattern
        # matching (see the docstring's precedence list).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress the picker's own chatter unless --debug is set;
            # util.nullcontextmanager() is a no-op context manager.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3029 3010
3030 3011
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: attempt the old -> new transition.
            key, old, new = keyinfo
            args = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with peer.commandexecutor() as executor:
                outcome = executor.callcommand(b'pushkey', args).result()

            ui.status(pycompat.bytestr(outcome) + b'\n')
            # Exit status 0 on success (truthy outcome), 1 otherwise.
            return not outcome
    finally:
        peer.close()
3066 3047
3067 3048
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions

    Prints both pvecs, their relation (=, >, <, or |), their depths,
    and distance metrics between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # NOTE(review): if none of these four comparisons holds, `rel` is
    # never bound and the final ui.write below raises NameError —
    # presumably pvec comparisons are exhaustive; confirm against the
    # pvec module.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3094 3075
3095 3076
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate

        # None tells rebuild() to reset every file; --minimal narrows
        # this to the set that is actually inconsistent (see docstring).
        to_rebuild = None
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            manifest_only = in_manifest - in_dirstate
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            to_rebuild = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), to_rebuild)
3143 3124
3144 3125
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # The heavy lifting lives in the repair module; we only forward the
    # --only-data flag.
    byte_opts = pycompat.byteskwargs(opts)
    only_data = byte_opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3161 3142
3162 3143
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # For every matched file in the target context, report the filelog's
    # recorded rename source (if any).
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        rename_info = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not rename_info:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            source, source_node = rename_info
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, source, hex(source_node))
            )
3182 3163
3183 3164
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in stable sorted order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3189 3170
3190 3171
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    Without --dump, prints aggregate statistics: revision kind counts
    (full snapshots, intermediate snapshots, deltas), size min/max/avg,
    delta-chain length/reach, chunk-type breakdown, and which revision
    each delta was computed against. With --dump, prints one raw table
    row per revision instead and returns immediately.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump fast path: flat, machine-readable table, one line per rev.
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the rev is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the set of current head revisions incrementally:
            # a rev's parents stop being heads once the rev is seen.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    # Each size accumulator is a [min, max, total] triple; the total
    # slot is divided in place further below to become an average.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # Single pass over all revisions, classifying each one and
    # accumulating the statistics declared above.
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta parent: stored as full text (a depth-0 snapshot).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta: extend the delta parent's chain bookkeeping.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: record which revision it was built against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        # presumably the else branch handles wrapper objects that expose
        # the real revlog via ._revlog — confirm against callers.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # The first byte of a chunk identifies its compression type.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derive totals, then convert each accumulator's total slot into an
    # average in place (guarding each division against empty classes).
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: dfmtstr produces a right-aligned integer
    # column wide enough for `max`; pcfmtstr adds a percentage column.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total) for pcfmtstr formatting.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render printable chunk-type bytes as "0xHH (c)", others as hex.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    # Per-class size statistics only exist for format > 0 (datasize was
    # only accumulated under the same condition above).
    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3545 3526
3546 3527
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only two output layouts are supported: 0 (node-oriented) and
    # 1 (flag/size-oriented); anything else is rejected up front.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full hashes, otherwise abbreviated ones.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    # (probe the first revision to size the nodeid column).
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # Format 0 resolves parents to nodeids; fall back to null
            # parents if the lookup fails.
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers and includes
            # the per-revision flags word.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3660 3641
3661 3642
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered transformation pipeline: each stage takes the previous
    # stage's tree and returns a (possibly rewritten) tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stage trees get printed: `showalways` always prints,
    # `showchanged` prints only if the stage altered the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree so the
    # --verify-optimized branch can compare analyzed vs optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both trees and emit a unified-diff-style listing of
        # any differing revisions; exit status 1 signals a mismatch.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the result revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3793 3774
3794 3775
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented here.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # Optional file handle that receives a log of all server I/O.
    logfh = None

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the repository over the process's stdin/stdout until EOF.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3843 3824
3844 3825
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and touches
    nothing else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # An omitted second parent means "null revision" (no second parent).
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # The wlock guards the dirstate file against concurrent writers.
    with repo.wlock():
        repo.setparents(node1, node2)
3872 3853
3873 3854
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied, so the first positional
    # argument is actually the revision, not a file.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap down to the underlying revlog when the storage is a wrapper.
    storage = getattr(storage, '_revlog', storage)
    try:
        sidedata = storage.sidedata(storage.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3900 3881
3901 3882
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an explicit client context instead. Verification is deliberately
    # disabled: we only want the peer's raw certificate so that
    # win32.checkcertificatechain() can inspect and repair the local chain.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes, as win32 expects.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only checks; second pass (build=True) asks Windows to
        # fetch any missing intermediates/roots.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3973 3954
3974 3955
3975 3956 @command(
3976 3957 b"debugbackupbundle",
3977 3958 [
3978 3959 (
3979 3960 b"",
3980 3961 b"recover",
3981 3962 b"",
3982 3963 b"brings the specified changeset back into the repository",
3983 3964 )
3984 3965 ]
3985 3966 + cmdutil.logopts,
3986 3967 _(b"hg debugbackupbundle [--recover HASH]"),
3987 3968 )
3988 3969 def debugbackupbundle(ui, repo, *pats, **opts):
3989 3970 """lists the changesets available in backup bundles
3990 3971
3991 3972 Without any arguments, this command prints a list of the changesets in each
3992 3973 backup bundle.
3993 3974
3994 3975 --recover takes a changeset hash and unbundles the first bundle that
3995 3976 contains that hash, which puts that changeset back in your repository.
3996 3977
3997 3978 --verbose will print the entire commit message and the bundle path for that
3998 3979 backup.
3999 3980 """
4000 3981 backups = list(
4001 3982 filter(
4002 3983 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
4003 3984 )
4004 3985 )
4005 3986 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
4006 3987
4007 3988 opts = pycompat.byteskwargs(opts)
4008 3989 opts[b"bundle"] = b""
4009 3990 opts[b"force"] = None
4010 3991 limit = logcmdutil.getlimit(opts)
4011 3992
4012 3993 def display(other, chlist, displayer):
4013 3994 if opts.get(b"newest_first"):
4014 3995 chlist.reverse()
4015 3996 count = 0
4016 3997 for n in chlist:
4017 3998 if limit is not None and count >= limit:
4018 3999 break
4019 4000 parents = [
4020 4001 True for p in other.changelog.parents(n) if p != repo.nullid
4021 4002 ]
4022 4003 if opts.get(b"no_merges") and len(parents) == 2:
4023 4004 continue
4024 4005 count += 1
4025 4006 displayer.show(other[n])
4026 4007
4027 4008 recovernode = opts.get(b"recover")
4028 4009 if recovernode:
4029 4010 if scmutil.isrevsymbol(repo, recovernode):
4030 4011 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
4031 4012 return
4032 4013 elif backups:
4033 4014 msg = _(
4034 4015 b"Recover changesets using: hg debugbackupbundle --recover "
4035 4016 b"<changeset hash>\n\nAvailable backup changesets:"
4036 4017 )
4037 4018 ui.status(msg, label=b"status.removed")
4038 4019 else:
4039 4020 ui.status(_(b"no backup changesets found\n"))
4040 4021 return
4041 4022
4042 4023 for backup in backups:
4043 4024 # Much of this is copied from the hg incoming logic
4044 4025 source = os.path.relpath(backup, encoding.getcwd())
4045 4026 source, branches = urlutil.get_unique_pull_path(
4046 4027 b'debugbackupbundle',
4047 4028 repo,
4048 4029 ui,
4049 4030 source,
4050 4031 default_branches=opts.get(b'branch'),
4051 4032 )
4052 4033 try:
4053 4034 other = hg.peer(repo, opts, source)
4054 4035 except error.LookupError as ex:
4055 4036 msg = _(b"\nwarning: unable to open bundle %s") % source
4056 4037 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
4057 4038 ui.warn(msg, hint=hint)
4058 4039 continue
4059 4040 revs, checkout = hg.addbranchrevs(
4060 4041 repo, other, branches, opts.get(b"rev")
4061 4042 )
4062 4043
4063 4044 if revs:
4064 4045 revs = [other.lookup(rev) for rev in revs]
4065 4046
4066 4047 with ui.silent():
4067 4048 try:
4068 4049 other, chlist, cleanupfn = bundlerepo.getremotechanges(
4069 4050 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
4070 4051 )
4071 4052 except error.LookupError:
4072 4053 continue
4073 4054
4074 4055 try:
4075 4056 if not chlist:
4076 4057 continue
4077 4058 if recovernode:
4078 4059 with repo.lock(), repo.transaction(b"unbundle") as tr:
4079 4060 if scmutil.isrevsymbol(other, recovernode):
4080 4061 ui.status(_(b"Unbundling %s\n") % (recovernode))
4081 4062 f = hg.openpath(ui, source)
4082 4063 gen = exchange.readbundle(ui, f, source)
4083 4064 if isinstance(gen, bundle2.unbundle20):
4084 4065 bundle2.applybundle(
4085 4066 repo,
4086 4067 gen,
4087 4068 tr,
4088 4069 source=b"unbundle",
4089 4070 url=b"bundle:" + source,
4090 4071 )
4091 4072 else:
4092 4073 gen.apply(repo, b"unbundle", b"bundle:" + source)
4093 4074 break
4094 4075 else:
4095 4076 backupdate = encoding.strtolocal(
4096 4077 time.strftime(
4097 4078 "%a %H:%M, %Y-%m-%d",
4098 4079 time.localtime(os.path.getmtime(source)),
4099 4080 )
4100 4081 )
4101 4082 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
4102 4083 if ui.verbose:
4103 4084 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4104 4085 else:
4105 4086 opts[
4106 4087 b"template"
4107 4088 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4108 4089 displayer = logcmdutil.changesetdisplayer(
4109 4090 ui, other, opts, False
4110 4091 )
4111 4092 display(other, chlist, displayer)
4112 4093 displayer.close()
4113 4094 finally:
4114 4095 cleanupfn()
4115 4096
4116 4097
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print path/source/revision for each subrepository recorded in the
    # substate of the selected (or working-directory) revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4128 4109
4129 4110
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the ui and (possibly None) repo.
    namespace = {'ui': ui, 'repo': repo}
    code.interact(local=namespace)
4145 4126
4146 4127
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # One cache dict is threaded through all successorssets() calls so
    # repeated computation is shared across revisions.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successors = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successors:
            # One line per successors set: short hashes separated by spaces.
            for node in succsset:
                ui.write(b' ')
                ui.write(short(node))
            ui.write(b'\n')
4201 4182
4202 4183
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what is already cached.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4221 4202
4222 4203
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log mode needs an actual repository (command is optionalrepo).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # 'ui' is reserved: it would shadow the built-in resource.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic mode: render the template once, with no changeset context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log mode: render the template once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4286 4267
4287 4268
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (e.g. non-interactive ui with no default).
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4302 4283
4303 4284
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed at the plain (non-password) prompt.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4316 4297
4317 4298
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both locks so cache generation cannot race with writers.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4323 4304
4324 4305
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All of the real work lives in the upgrade module; dedupe -o names here.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4374 4355
4375 4356
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Optionally normalize path separators for display.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = lambda fn: util.normpath(fn)
    else:
        display = lambda fn: fn
    # Column widths sized to the longest repo-absolute and relative paths.
    widest_abs = max(len(p) for p in paths)
    widest_rel = max(len(repo.pathto(p)) for p in paths)
    fmt = b'f %%-%ds %%-%ds %%s' % (widest_abs, widest_rel)
    for path in paths:
        flag = b'exact' if matcher.exact(path) else b''
        line = fmt % (path, display(repo.pathto(path)), flag)
        ui.write(b"%s\n" % line.rstrip())
4402 4383
4403 4384
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # "hash (phase)" for each divergent node, space separated,
            # with a trailing space before the reason text.
            descriptions = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(descriptions) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4421 4402
4422 4403
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # The remote options were consumed by hg.peer(); strip them so only
        # the command's own arguments are forwarded.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4453 4434
4454 4435
4455 4436 def _parsewirelangblocks(fh):
4456 4437 activeaction = None
4457 4438 blocklines = []
4458 4439 lastindent = 0
4459 4440
4460 4441 for line in fh:
4461 4442 line = line.rstrip()
4462 4443 if not line:
4463 4444 continue
4464 4445
4465 4446 if line.startswith(b'#'):
4466 4447 continue
4467 4448
4468 4449 if not line.startswith(b' '):
4469 4450 # New block. Flush previous one.
4470 4451 if activeaction:
4471 4452 yield activeaction, blocklines
4472 4453
4473 4454 activeaction = line
4474 4455 blocklines = []
4475 4456 lastindent = 0
4476 4457 continue
4477 4458
4478 4459 # Else we start with an indent.
4479 4460
4480 4461 if not activeaction:
4481 4462 raise error.Abort(_(b'indented line outside of block'))
4482 4463
4483 4464 indent = len(line) - len(line.lstrip())
4484 4465
4485 4466 # If this line is indented more than the last line, concatenate it.
4486 4467 if indent > lastindent and blocklines:
4487 4468 blocklines[-1] += line.lstrip()
4488 4469 else:
4489 4470 blocklines.append(line)
4490 4471 lastindent = indent
4491 4472
4492 4473 # Flush last block.
4493 4474 if activeaction:
4494 4475 yield activeaction, blocklines
4495 4476
4496 4477
4497 4478 @command(
4498 4479 b'debugwireproto',
4499 4480 [
4500 4481 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4501 4482 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4502 4483 (
4503 4484 b'',
4504 4485 b'noreadstderr',
4505 4486 False,
4506 4487 _(b'do not read from stderr of the remote'),
4507 4488 ),
4508 4489 (
4509 4490 b'',
4510 4491 b'nologhandshake',
4511 4492 False,
4512 4493 _(b'do not log I/O related to the peer handshake'),
4513 4494 ),
4514 4495 ]
4515 4496 + cmdutil.remoteopts,
4516 4497 _(b'[PATH]'),
4517 4498 optionalrepo=True,
4518 4499 )
4519 4500 def debugwireproto(ui, repo, path=None, **opts):
4520 4501 """send wire protocol commands to a server
4521 4502
4522 4503 This command can be used to issue wire protocol commands to remote
4523 4504 peers and to debug the raw data being exchanged.
4524 4505
4525 4506 ``--localssh`` will start an SSH server against the current repository
4526 4507 and connect to that. By default, the connection will perform a handshake
4527 4508 and establish an appropriate peer instance.
4528 4509
4529 4510 ``--peer`` can be used to bypass the handshake protocol and construct a
4530 4511 peer instance using the specified class type. Valid values are ``raw``,
4531 4512 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4532 4513 don't support higher-level command actions.
4533 4514
4534 4515 ``--noreadstderr`` can be used to disable automatic reading from stderr
4535 4516 of the peer (for SSH connections only). Disabling automatic reading of
4536 4517 stderr is useful for making output more deterministic.
4537 4518
4538 4519 Commands are issued via a mini language which is specified via stdin.
4539 4520 The language consists of individual actions to perform. An action is
4540 4521 defined by a block. A block is defined as a line with no leading
4541 4522 space followed by 0 or more lines with leading space. Blocks are
4542 4523 effectively a high-level command with additional metadata.
4543 4524
4544 4525 Lines beginning with ``#`` are ignored.
4545 4526
4546 4527 The following sections denote available actions.
4547 4528
4548 4529 raw
4549 4530 ---
4550 4531
4551 4532 Send raw data to the server.
4552 4533
4553 4534 The block payload contains the raw data to send as one atomic send
4554 4535 operation. The data may not actually be delivered in a single system
4555 4536 call: it depends on the abilities of the transport being used.
4556 4537
4557 4538 Each line in the block is de-indented and concatenated. Then, that
4558 4539 value is evaluated as a Python b'' literal. This allows the use of
4559 4540 backslash escaping, etc.
4560 4541
4561 4542 raw+
4562 4543 ----
4563 4544
4564 4545 Behaves like ``raw`` except flushes output afterwards.
4565 4546
4566 4547 command <X>
4567 4548 -----------
4568 4549
4569 4550 Send a request to run a named command, whose name follows the ``command``
4570 4551 string.
4571 4552
4572 4553 Arguments to the command are defined as lines in this block. The format of
4573 4554 each line is ``<key> <value>``. e.g.::
4574 4555
4575 4556 command listkeys
4576 4557 namespace bookmarks
4577 4558
4578 4559 If the value begins with ``eval:``, it will be interpreted as a Python
4579 4560 literal expression. Otherwise values are interpreted as Python b'' literals.
4580 4561 This allows sending complex types and encoding special byte sequences via
4581 4562 backslash escaping.
4582 4563
4583 4564 The following arguments have special meaning:
4584 4565
4585 4566 ``PUSHFILE``
4586 4567 When defined, the *push* mechanism of the peer will be used instead
4587 4568 of the static request-response mechanism and the content of the
4588 4569 file specified in the value of this argument will be sent as the
4589 4570 command payload.
4590 4571
4591 4572 This can be used to submit a local bundle file to the remote.
4592 4573
4593 4574 batchbegin
4594 4575 ----------
4595 4576
4596 4577 Instruct the peer to begin a batched send.
4597 4578
4598 4579 All ``command`` blocks are queued for execution until the next
4599 4580 ``batchsubmit`` block.
4600 4581
4601 4582 batchsubmit
4602 4583 -----------
4603 4584
4604 4585 Submit previously queued ``command`` blocks as a batch request.
4605 4586
4606 4587 This action MUST be paired with a ``batchbegin`` action.
4607 4588
4608 4589 httprequest <method> <path>
4609 4590 ---------------------------
4610 4591
4611 4592 (HTTP peer only)
4612 4593
4613 4594 Send an HTTP request to the peer.
4614 4595
4615 4596 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4616 4597
4617 4598 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4618 4599 headers to add to the request. e.g. ``Accept: foo``.
4619 4600
4620 4601 The following arguments are special:
4621 4602
4622 4603 ``BODYFILE``
4623 4604 The content of the file defined as the value to this argument will be
4624 4605 transferred verbatim as the HTTP request body.
4625 4606
4626 4607 ``frame <type> <flags> <payload>``
4627 4608 Send a unified protocol frame as part of the request body.
4628 4609
4629 4610 All frames will be collected and sent as the body to the HTTP
4630 4611 request.
4631 4612
4632 4613 close
4633 4614 -----
4634 4615
4635 4616 Close the connection to the server.
4636 4617
4637 4618 flush
4638 4619 -----
4639 4620
4640 4621 Flush data written to the server.
4641 4622
4642 4623 readavailable
4643 4624 -------------
4644 4625
4645 4626 Close the write end of the connection and read all available data from
4646 4627 the server.
4647 4628
4648 4629 If the connection to the server encompasses multiple pipes, we poll both
4649 4630 pipes and read available data.
4650 4631
4651 4632 readline
4652 4633 --------
4653 4634
4654 4635 Read a line of output from the server. If there are multiple output
4655 4636 pipes, reads only the main pipe.
4656 4637
4657 4638 ereadline
4658 4639 ---------
4659 4640
4660 4641 Like ``readline``, but read from the stderr pipe, if available.
4661 4642
4662 4643 read <X>
4663 4644 --------
4664 4645
4665 4646 ``read()`` N bytes from the server's main output pipe.
4666 4647
4667 4648 eread <X>
4668 4649 ---------
4669 4650
4670 4651 ``read()`` N bytes from the server's stderr pipe, if available.
4671 4652
4672 4653 Specifying Unified Frame-Based Protocol Frames
4673 4654 ----------------------------------------------
4674 4655
4675 4656 It is possible to emit a *Unified Frame-Based Protocol* by using special
4676 4657 syntax.
4677 4658
4678 4659 A frame is composed as a type, flags, and payload. These can be parsed
4679 4660 from a string of the form:
4680 4661
4681 4662 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4682 4663
4683 4664 ``request-id`` and ``stream-id`` are integers defining the request and
4684 4665 stream identifiers.
4685 4666
4686 4667 ``type`` can be an integer value for the frame type or the string name
4687 4668 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4688 4669 ``command-name``.
4689 4670
4690 4671 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4691 4672 components. Each component (and there can be just one) can be an integer
4692 4673 or a flag name for stream flags or frame flags, respectively. Values are
4693 4674 resolved to integers and then bitwise OR'd together.
4694 4675
4695 4676 ``payload`` represents the raw frame payload. If it begins with
4696 4677 ``cbor:``, the following string is evaluated as Python code and the
4697 4678 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4698 4679 as a Python byte string literal.
4699 4680 """
4700 4681 opts = pycompat.byteskwargs(opts)
4701 4682
4702 4683 if opts[b'localssh'] and not repo:
4703 4684 raise error.Abort(_(b'--localssh requires a repository'))
4704 4685
4705 4686 if opts[b'peer'] and opts[b'peer'] not in (
4706 4687 b'raw',
4707 4688 b'ssh1',
4708 4689 ):
4709 4690 raise error.Abort(
4710 4691 _(b'invalid value for --peer'),
4711 4692 hint=_(b'valid values are "raw" and "ssh1"'),
4712 4693 )
4713 4694
4714 4695 if path and opts[b'localssh']:
4715 4696 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4716 4697
4717 4698 if ui.interactive():
4718 4699 ui.write(_(b'(waiting for commands on stdin)\n'))
4719 4700
4720 4701 blocks = list(_parsewirelangblocks(ui.fin))
4721 4702
4722 4703 proc = None
4723 4704 stdin = None
4724 4705 stdout = None
4725 4706 stderr = None
4726 4707 opener = None
4727 4708
4728 4709 if opts[b'localssh']:
4729 4710 # We start the SSH server in its own process so there is process
4730 4711 # separation. This prevents a whole class of potential bugs around
4731 4712 # shared state from interfering with server operation.
4732 4713 args = procutil.hgcmd() + [
4733 4714 b'-R',
4734 4715 repo.root,
4735 4716 b'debugserve',
4736 4717 b'--sshstdio',
4737 4718 ]
4738 4719 proc = subprocess.Popen(
4739 4720 pycompat.rapply(procutil.tonativestr, args),
4740 4721 stdin=subprocess.PIPE,
4741 4722 stdout=subprocess.PIPE,
4742 4723 stderr=subprocess.PIPE,
4743 4724 bufsize=0,
4744 4725 )
4745 4726
4746 4727 stdin = proc.stdin
4747 4728 stdout = proc.stdout
4748 4729 stderr = proc.stderr
4749 4730
4750 4731 # We turn the pipes into observers so we can log I/O.
4751 4732 if ui.verbose or opts[b'peer'] == b'raw':
4752 4733 stdin = util.makeloggingfileobject(
4753 4734 ui, proc.stdin, b'i', logdata=True
4754 4735 )
4755 4736 stdout = util.makeloggingfileobject(
4756 4737 ui, proc.stdout, b'o', logdata=True
4757 4738 )
4758 4739 stderr = util.makeloggingfileobject(
4759 4740 ui, proc.stderr, b'e', logdata=True
4760 4741 )
4761 4742
4762 4743 # --localssh also implies the peer connection settings.
4763 4744
4764 4745 url = b'ssh://localserver'
4765 4746 autoreadstderr = not opts[b'noreadstderr']
4766 4747
4767 4748 if opts[b'peer'] == b'ssh1':
4768 4749 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4769 4750 peer = sshpeer.sshv1peer(
4770 4751 ui,
4771 4752 url,
4772 4753 proc,
4773 4754 stdin,
4774 4755 stdout,
4775 4756 stderr,
4776 4757 None,
4777 4758 autoreadstderr=autoreadstderr,
4778 4759 )
4779 4760 elif opts[b'peer'] == b'raw':
4780 4761 ui.write(_(b'using raw connection to peer\n'))
4781 4762 peer = None
4782 4763 else:
4783 4764 ui.write(_(b'creating ssh peer from handshake results\n'))
4784 4765 peer = sshpeer.makepeer(
4785 4766 ui,
4786 4767 url,
4787 4768 proc,
4788 4769 stdin,
4789 4770 stdout,
4790 4771 stderr,
4791 4772 autoreadstderr=autoreadstderr,
4792 4773 )
4793 4774
4794 4775 elif path:
4795 4776 # We bypass hg.peer() so we can proxy the sockets.
4796 4777 # TODO consider not doing this because we skip
4797 4778 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4798 4779 u = urlutil.url(path)
4799 4780 if u.scheme != b'http':
4800 4781 raise error.Abort(_(b'only http:// paths are currently supported'))
4801 4782
4802 4783 url, authinfo = u.authinfo()
4803 4784 openerargs = {
4804 4785 'useragent': b'Mercurial debugwireproto',
4805 4786 }
4806 4787
4807 4788 # Turn pipes/sockets into observers so we can log I/O.
4808 4789 if ui.verbose:
4809 4790 openerargs.update(
4810 4791 {
4811 4792 'loggingfh': ui,
4812 4793 'loggingname': b's',
4813 4794 'loggingopts': {
4814 4795 'logdata': True,
4815 4796 'logdataapis': False,
4816 4797 },
4817 4798 }
4818 4799 )
4819 4800
4820 4801 if ui.debugflag:
4821 4802 openerargs['loggingopts']['logdataapis'] = True
4822 4803
4823 4804 # Don't send default headers when in raw mode. This allows us to
4824 4805 # bypass most of the behavior of our URL handling code so we can
4825 4806 # have near complete control over what's sent on the wire.
4826 4807 if opts[b'peer'] == b'raw':
4827 4808 openerargs['sendaccept'] = False
4828 4809
4829 4810 opener = urlmod.opener(ui, authinfo, **openerargs)
4830 4811
4831 4812 if opts[b'peer'] == b'raw':
4832 4813 ui.write(_(b'using raw connection to peer\n'))
4833 4814 peer = None
4834 4815 elif opts[b'peer']:
4835 4816 raise error.Abort(
4836 4817 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4837 4818 )
4838 4819 else:
4839 4820 peer = httppeer.makepeer(ui, path, opener=opener)
4840 4821
4841 4822 # We /could/ populate stdin/stdout with sock.makefile()...
4842 4823 else:
4843 4824 raise error.Abort(_(b'unsupported connection configuration'))
4844 4825
4845 4826 batchedcommands = None
4846 4827
4847 4828 # Now perform actions based on the parsed wire language instructions.
4848 4829 for action, lines in blocks:
4849 4830 if action in (b'raw', b'raw+'):
4850 4831 if not stdin:
4851 4832 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4852 4833
4853 4834 # Concatenate the data together.
4854 4835 data = b''.join(l.lstrip() for l in lines)
4855 4836 data = stringutil.unescapestr(data)
4856 4837 stdin.write(data)
4857 4838
4858 4839 if action == b'raw+':
4859 4840 stdin.flush()
4860 4841 elif action == b'flush':
4861 4842 if not stdin:
4862 4843 raise error.Abort(_(b'cannot call flush on this peer'))
4863 4844 stdin.flush()
4864 4845 elif action.startswith(b'command'):
4865 4846 if not peer:
4866 4847 raise error.Abort(
4867 4848 _(
4868 4849 b'cannot send commands unless peer instance '
4869 4850 b'is available'
4870 4851 )
4871 4852 )
4872 4853
4873 4854 command = action.split(b' ', 1)[1]
4874 4855
4875 4856 args = {}
4876 4857 for line in lines:
4877 4858 # We need to allow empty values.
4878 4859 fields = line.lstrip().split(b' ', 1)
4879 4860 if len(fields) == 1:
4880 4861 key = fields[0]
4881 4862 value = b''
4882 4863 else:
4883 4864 key, value = fields
4884 4865
4885 4866 if value.startswith(b'eval:'):
4886 4867 value = stringutil.evalpythonliteral(value[5:])
4887 4868 else:
4888 4869 value = stringutil.unescapestr(value)
4889 4870
4890 4871 args[key] = value
4891 4872
4892 4873 if batchedcommands is not None:
4893 4874 batchedcommands.append((command, args))
4894 4875 continue
4895 4876
4896 4877 ui.status(_(b'sending %s command\n') % command)
4897 4878
4898 4879 if b'PUSHFILE' in args:
4899 4880 with open(args[b'PUSHFILE'], 'rb') as fh:
4900 4881 del args[b'PUSHFILE']
4901 4882 res, output = peer._callpush(
4902 4883 command, fh, **pycompat.strkwargs(args)
4903 4884 )
4904 4885 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4905 4886 ui.status(
4906 4887 _(b'remote output: %s\n') % stringutil.escapestr(output)
4907 4888 )
4908 4889 else:
4909 4890 with peer.commandexecutor() as e:
4910 4891 res = e.callcommand(command, args).result()
4911 4892
4912 4893 ui.status(
4913 4894 _(b'response: %s\n')
4914 4895 % stringutil.pprint(res, bprefix=True, indent=2)
4915 4896 )
4916 4897
4917 4898 elif action == b'batchbegin':
4918 4899 if batchedcommands is not None:
4919 4900 raise error.Abort(_(b'nested batchbegin not allowed'))
4920 4901
4921 4902 batchedcommands = []
4922 4903 elif action == b'batchsubmit':
4923 4904 # There is a batching API we could go through. But it would be
4924 4905 # difficult to normalize requests into function calls. It is easier
4925 4906 # to bypass this layer and normalize to commands + args.
4926 4907 ui.status(
4927 4908 _(b'sending batch with %d sub-commands\n')
4928 4909 % len(batchedcommands)
4929 4910 )
4930 4911 assert peer is not None
4931 4912 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4932 4913 ui.status(
4933 4914 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4934 4915 )
4935 4916
4936 4917 batchedcommands = None
4937 4918
4938 4919 elif action.startswith(b'httprequest '):
4939 4920 if not opener:
4940 4921 raise error.Abort(
4941 4922 _(b'cannot use httprequest without an HTTP peer')
4942 4923 )
4943 4924
4944 4925 request = action.split(b' ', 2)
4945 4926 if len(request) != 3:
4946 4927 raise error.Abort(
4947 4928 _(
4948 4929 b'invalid httprequest: expected format is '
4949 4930 b'"httprequest <method> <path>'
4950 4931 )
4951 4932 )
4952 4933
4953 4934 method, httppath = request[1:]
4954 4935 headers = {}
4955 4936 body = None
4956 4937 frames = []
4957 4938 for line in lines:
4958 4939 line = line.lstrip()
4959 4940 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4960 4941 if m:
4961 4942 # Headers need to use native strings.
4962 4943 key = pycompat.strurl(m.group(1))
4963 4944 value = pycompat.strurl(m.group(2))
4964 4945 headers[key] = value
4965 4946 continue
4966 4947
4967 4948 if line.startswith(b'BODYFILE '):
4968 4949 with open(line.split(b' ', 1), b'rb') as fh:
4969 4950 body = fh.read()
4970 4951 elif line.startswith(b'frame '):
4971 4952 frame = wireprotoframing.makeframefromhumanstring(
4972 4953 line[len(b'frame ') :]
4973 4954 )
4974 4955
4975 4956 frames.append(frame)
4976 4957 else:
4977 4958 raise error.Abort(
4978 4959 _(b'unknown argument to httprequest: %s') % line
4979 4960 )
4980 4961
4981 4962 url = path + httppath
4982 4963
4983 4964 if frames:
4984 4965 body = b''.join(bytes(f) for f in frames)
4985 4966
4986 4967 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4987 4968
4988 4969 # urllib.Request insists on using has_data() as a proxy for
4989 4970 # determining the request method. Override that to use our
4990 4971 # explicitly requested method.
4991 4972 req.get_method = lambda: pycompat.sysstr(method)
4992 4973
4993 4974 try:
4994 4975 res = opener.open(req)
4995 4976 body = res.read()
4996 4977 except util.urlerr.urlerror as e:
4997 4978 # read() method must be called, but only exists in Python 2
4998 4979 getattr(e, 'read', lambda: None)()
4999 4980 continue
5000 4981
5001 4982 ct = res.headers.get('Content-Type')
5002 4983 if ct == 'application/mercurial-cbor':
5003 4984 ui.write(
5004 4985 _(b'cbor> %s\n')
5005 4986 % stringutil.pprint(
5006 4987 cborutil.decodeall(body), bprefix=True, indent=2
5007 4988 )
5008 4989 )
5009 4990
5010 4991 elif action == b'close':
5011 4992 assert peer is not None
5012 4993 peer.close()
5013 4994 elif action == b'readavailable':
5014 4995 if not stdout or not stderr:
5015 4996 raise error.Abort(
5016 4997 _(b'readavailable not available on this peer')
5017 4998 )
5018 4999
5019 5000 stdin.close()
5020 5001 stdout.read()
5021 5002 stderr.read()
5022 5003
5023 5004 elif action == b'readline':
5024 5005 if not stdout:
5025 5006 raise error.Abort(_(b'readline not available on this peer'))
5026 5007 stdout.readline()
5027 5008 elif action == b'ereadline':
5028 5009 if not stderr:
5029 5010 raise error.Abort(_(b'ereadline not available on this peer'))
5030 5011 stderr.readline()
5031 5012 elif action.startswith(b'read '):
5032 5013 count = int(action.split(b' ', 1)[1])
5033 5014 if not stdout:
5034 5015 raise error.Abort(_(b'read not available on this peer'))
5035 5016 stdout.read(count)
5036 5017 elif action.startswith(b'eread '):
5037 5018 count = int(action.split(b' ', 1)[1])
5038 5019 if not stderr:
5039 5020 raise error.Abort(_(b'eread not available on this peer'))
5040 5021 stderr.read(count)
5041 5022 else:
5042 5023 raise error.Abort(_(b'unknown action: %s') % action)
5043 5024
5044 5025 if batchedcommands is not None:
5045 5026 raise error.Abort(_(b'unclosed "batchbegin" request'))
5046 5027
5047 5028 if peer:
5048 5029 peer.close()
5049 5030
5050 5031 if proc:
5051 5032 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now