##// END OF EJS Templates
ui: remove excessive strtolocal() from debuguigetpass...
Yuya Nishihara -
r46653:e614eeb7 stable
parent child Browse files
Show More
@@ -1,4582 +1,4580 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 revlog,
73 73 revset,
74 74 revsetlang,
75 75 scmutil,
76 76 setdiscovery,
77 77 simplemerge,
78 78 sshpeer,
79 79 sslutil,
80 80 streamclone,
81 81 tags as tagsmod,
82 82 templater,
83 83 treediscovery,
84 84 upgrade,
85 85 url as urlmod,
86 86 util,
87 87 vfs as vfsmod,
88 88 wireprotoframing,
89 89 wireprotoserver,
90 90 wireprotov2peer,
91 91 )
92 92 from .utils import (
93 93 cborutil,
94 94 compression,
95 95 dateutil,
96 96 procutil,
97 97 stringutil,
98 98 )
99 99
100 100 from .revlogutils import (
101 101 deltas as deltautil,
102 102 nodemap,
103 103 sidedata,
104 104 )
105 105
# Convenience alias: call sites release lock objects without going through
# the lockmod module each time.
release = lockmod.release

# Command registration table; every @command decorator below registers its
# debug command here.
command = registrar.command()
109 109
110 110
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog
        # rooted at the current working directory.
        index, rev1, rev2 = args
        opener = vfsmod.vfs(encoding.getcwd(), audit=False)
        rl = revlog.revlog(opener, index)
        lookup = rl.lookup
    elif nargs == 2:
        # No index file: resolve the revisions against the local changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
130 130
131 131
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # The filename must be bytes like every other vfs path in this module
    # (the mode argument is already b'wb'); a native str here mixes str and
    # bytes inside the vfs path handling on Python 3.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
147 147
148 148
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (local file or URL) and replay it into the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
155 155
156 156
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    # Fall back to reading the DAG description from stdin.
    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a freshly-created repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, used for the
    # progress bar total and to size the initial mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass: actually create the commits, under the usual
    # locks and a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1                 # rev id of the most recently created node
        atbranch = b'default'   # branch applied to subsequent commits
        nodeids = []            # maps parsed rev id -> commit node
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one commit; data is (id, parent ids)
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file's content
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this rev into its dedicated line of the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file rewritten wholesale at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per rev ...
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # ... and on merges, carry over p2's nf* files so
                        # they survive in the merged manifest
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate parsed parent ids into commit nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for an earlier rev
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the branch for subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # write accumulated tags as localtags (not versioned) once committed
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
332 332
333 333
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup ``gen``.

    With ``all`` set, dump every delta of the changelog, manifest, and each
    filelog; otherwise only print the changelog node hashes. ``indent``
    prefixes every output line (used when nested inside bundle2 output).
    Note: this consumes ``gen``, which is read sequentially.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump the current section's deltas under a 'named' heading
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections appear in fixed order: changelog, manifest, then one
        # section per file (terminated by an empty filelog header)
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: changelog node ids only
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
373 373
374 374
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report the unreadable payload and bail out
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        mark = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, mark)
    fm.end()
397 397
398 398
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(b'%s%s %s\n' % (prefix, hex(head), phasename))
407 407
408 408
def _quasirepr(thing):
    """Render ``thing`` as bytes, with deterministic key order for mappings."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
415 415
416 416
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2

    Prints the stream parameters and each part's type/params/mandatory
    flag, optionally filtered by the ``part-type`` option. Recognized part
    payloads (changegroup, obsmarkers, phase-heads) are expanded inline
    unless the ui is quiet.
    """
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # default to the oldest changegroup format when unspecified
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
439 439
440 440
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, nothing else
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
463 463
464 464
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # top-level (wire protocol v1) capabilities
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    # bundle2 capabilities, one indented line per value under each key
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
481 481
482 482
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is None:
        # no files sidedata stored for this revision: nothing to print
        return
    files = metadata.decode_files_sidedata(sd)
    for f in sorted(files.touched):
        # first matching category wins; 'touched' is the fallback
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(
            b"%-8s %2s: %s, %s;\n" % (action, copy_parent, f, copy_source)
        )
515 515
516 516
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0

    def complain(msg, f, state):
        # report one dirstate/manifest inconsistency and count it
        ui.warn(msg % (f, state))
        return 1

    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            errors += complain(
                _(b"%s in state %s, but not in manifest1\n"), f, state
            )
        if state in b"a" and f in m1:
            errors += complain(
                _(b"%s in state %s, but also in manifest1\n"), f, state
            )
        if state in b"m" and f not in m1 and f not in m2:
            errors += complain(
                _(b"%s in state %s, but not in either manifest\n"), f, state
            )
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            errors += complain(
                _(b"%s in manifest1, but listed as state %s"), f, state
            )
    if errors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
545 545
546 546
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
559 559
560 560
def _debugdisplaycolor(ui):
    """List every color/effect name, each rendered in its own style."""
    # Work on a copy so we can repurpose the style table without touching
    # the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    # Map each active effect to itself so the name prints in that effect.
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, also surface user-configured color.*/terminfo.*
        # entries (with their prefix stripped as the label).
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
577 577
578 578
def _debugdisplaystyle(ui):
    """List each configured style label with its effects, column-aligned."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in one column
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, width - len(label)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
592 592
593 593
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out; warn rather than abort (see TODO above).
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream bundle and write its chunks to 'fname'.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
615 615
616 616
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone index file: read its DAG directly
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) per node, dropping null parents;
            # listed revs additionally get an ('l', ...) label event
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its tag names for label events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' annotation whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # serialize the event stream into dagparser's compact text form
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
686 686
687 687
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storageselected = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if storageselected:
        # -c/-m/--dir pick the storage themselves, so the single
        # positional argument is the revision, not a file name
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
703 703
704 704
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is the internal (unixtime, tz offset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
723 723
724 724
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used below: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        # (as implied by the deltatype comparisons below)
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify what the delta was computed against
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the
            # previous rev unless this rev is a full snapshot
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # sum compressed sizes over the whole delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain ids are assigned in order of first-seen chain base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # on-disk span from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this rev is its own base
            prevrev = -1

        # guard the ratios against zero-sized denominators
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain and measure the blocks
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
905 905
906 906
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; either disables
    # mtime display
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each dirstate entry 'ent' is indexed as: ent[0] state char,
    # ent[1] mode bits, ent[2] size field, ent[3] mtime (-1 = unset)
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the stored mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
950 950
951 951
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        # fix: these two help strings were the only entries in this table
        # not marked for translation with _()
        (b'', b'rev', [], _(b'restrict discovery to this set of revs')),
        (
            b'',
            b'seed',
            b'12323',
            _(b'specify the random seed use for discovery'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    Runs either the legacy tree-walking discovery (``--old``) or the
    current set-based discovery against ``remoteurl`` and prints summary
    statistics about common/missing heads and revisions.
    """
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the full common set to its heads, as the modern
            # discovery does
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
1056 1056
1057 1057
1058 1058 _chunksize = 4 << 10
1059 1059
1060 1060
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The downloaded bytes are written to ``--output`` when given, otherwise
    streamed to the ui.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # fix: the source handle was previously never closed, leaking the
        # underlying connection/file on every invocation
        fh.close()
1080 1080
1081 1081
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # one formatter item per extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no __file__; report the binary
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with a testing status
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1143 1143
1144 1144
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses ``expr`` through the fileset pipeline (parse -> analyze ->
    optimize), optionally dumping the tree after each requested stage,
    then prints the files matched by the resulting matcher.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # ordered pipeline of transformation stages applied to the parsed tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file set the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1240 1240
1241 1241
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the first column: the longest variant name (at least as wide
    # as the b'format-variant' header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad every name to the same column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # render booleans as yes/no for plain output; pass strings through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color labels flag mismatches between repo state, config and default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1312 1312
1313 1313
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result exactly as "yes"/"no"
        return flag and b'yes' or b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway temp file; any OS error
    # (e.g. unwritable path) leaves the answer as "(unknown)"
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1336 1336
1337 1337
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # translate the hex ids from the command line into binary nodes
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle type
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1384 1384
1385 1385
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then walk up its parent
                # directories looking for an ignored ancestor
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1434 1434
1435 1435
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, short ones otherwise
    shortfn = hex if ui.debugflag else short

    # column width for node ids, taken from the first entry (12 if empty)
    idlen = next((len(shortfn(store.node(r))) for r in store), 12)

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1475 1475
1476 1476
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # emit one edge per parent; the second parent only exists for merges
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1495 1495
1496 1496
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touching shortest() forces the index to be fully loaded
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1506 1506
1507 1507
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Runs a battery of sanity checks (encoding, Python, compiled modules,
    compression engines, templates, editor, username) and reports each
    result through a formatter.

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # running tally of detected problems; also the return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no os.__file__
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b'  TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b'  SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # only try importing the accelerated extensions the policy asks for
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1806 1806
1807 1807
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one '1'/'0' character per queried id, in input order
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
1821 1821
1822 1822
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin forwarding alias: the real implementation is debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1827 1827
1828 1828
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forcible removal mode: unlink the lock files and stop
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    # set mode: take the requested locks, hold until the user responds,
    # and guarantee release via the finally clause
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # report mode: describe the holder of each lock (or "free")
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock, so it was free; give it back
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # a vanished lock file just means the lock is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1940 1940
1941 1941
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of some revlog
        # variants; abort with an explanation when it is absent.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: drop both the in-memory and the persisted cache data.
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    # --add NODE...: reading a manifest populates the fulltext cache as a
    # side effect, so a plain read() is enough to warm it.
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    # No action requested: display the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2013 2013
2014 2014
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # With --verbose, first report which on-disk merge-state format (v1/v2)
    # would be used, by reading both record formats directly.
    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # Default template renders commits, per-file state and extras in a
    # human-readable layout; -T overrides it.
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two merge parents ("local" and "other"), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the meaning of the state tuple fields
    # depends on the record type (content merge vs. path conflict).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2122 2122
2123 2123
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect every name from all namespaces except 'branches': branch
    # names are handled separately below so that only open branches are
    # offered as completions.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname != b'branches':
            candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # An empty argument list means "complete everything".
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2146 2146
2147 2147
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap
    """
    # --dump-new: serialize the current in-memory nodemap of the
    # (unfiltered) changelog and write the binary blob to stdout.
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # fast path when the index can serialize itself (rust index)
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    # --dump-disk: emit the persisted nodemap bytes as stored on disk.
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    # --check: validate the persisted data against the live index.
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    # --metadata: print the docket (header) fields of the persisted nodemap.
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2210 2210
2211 2211
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hexadecimal node id; abort on anything shorter or
        # malformed.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: --delete INDEX... removes markers by store index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Creation mode: a precursor (and optional successors) was given.
    if precursor is not None:
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Recording parents requires the precursor to be known
                    # locally (looked up in the unfiltered repo).
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so iterate
            # everything but only display the selected markers.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2359 2359
2360 2360
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working directory when no --rev).
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # Print one "source -> destination" line per recorded copy.
    for destination, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2373 2373
2374 2374
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # This function was previously (mis)named debugp1copies, which shadowed
    # the real debugp1copies defined just above at module level; the command
    # registration (b'debugp2copies') was unaffected, but the module attribute
    # debugcommands.debugp1copies pointed at this p2 variant.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Print one "source -> destination" line per copy recorded against p2.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2387 2387
2388 2388
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) sets of completions for 'path', keeping
        # only dirstate entries whose status character is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs outside the repository cannot match anything.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # On platforms with a non-'/' separator, dirstate paths use '/',
        # so normalize the spec for matching and convert back on output.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate the match at the next separator
                # so completion advances one path segment at a time.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate status characters from the
    # -n/-a/-r flags; empty means "all of n/m/a/r" (see the call below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2457 2457
2458 2458
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints and restrict the result with the usual
    # file-matching options (-I/-X, patterns).
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2472 2472
2473 2473
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Turn peer request logging on unconditionally; the log lines still
    # only show up when --debug is in effect.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    def yesno(flag):
        # Render a boolean with the translated yes/no strings.
        return _(b'yes') if flag else _(b'no')

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
2492 2492
2493 2493
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is forwarded to the picker via the ui.forcemerge override,
    # mirroring what the real merge machinery does.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other tool-selection inputs (verbose only).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, swallow _picktool's warning output so
                # only the "FILE = TOOL" lines are printed.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2581 2581
2582 2582
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace,
        # escaping both sides for safe display.
        listing = peer.listkeys(namespace)
        for key in sorted(listing):
            ui.write(
                b"%s\t%s\n"
                % (
                    stringutil.escapestr(key),
                    stringutil.escapestr(listing[key]),
                )
            )
        return

    # Update mode: attempt the old -> new transition for the given key.
    key, old, new = keyinfo
    arguments = {
        b'namespace': namespace,
        b'key': key,
        b'old': old,
        b'new': new,
    }
    with peer.commandexecutor() as executor:
        result = executor.callcommand(b'pushkey', arguments).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2614 2614
2615 2615
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (parent vectors) of two revisions and print their
    # depths, delta, hamming distance and relation symbol.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Default for the case where none of the comparisons below matches;
    # previously 'rel' was left unbound in that situation and the final
    # write raised a NameError. Presumably the four cases are exhaustive
    # for pvecs, in which case this default is never printed.
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2642 2642
2643 2643
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files in the manifest but missing from the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # dirstate-only files, excluding ones marked as added ('a')
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2691 2691
2692 2692
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all of the work happens in repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2697 2697
2698 2698
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() reports the copy source recorded in the filelog,
        # or a false value when the file was not renamed.
        renameinfo = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if not renameinfo:
            ui.write(_(b"%s not renamed\n") % display)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (display, renameinfo[0], hex(renameinfo[1]))
            )
2718 2718
2719 2719
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, sorted, emitted in a single write call.
    lines = [b"%s\n" % requirement for requirement in sorted(repo.requirements)]
    ui.write(b''.join(lines))
2725 2725
2726 2726
2727 2727 @command(
2728 2728 b'debugrevlog',
2729 2729 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2730 2730 _(b'-c|-m|FILE'),
2731 2731 optionalrepo=True,
2732 2732 )
2733 2733 def debugrevlog(ui, repo, file_=None, **opts):
2734 2734 """show data and statistics about a revlog"""
2735 2735 opts = pycompat.byteskwargs(opts)
2736 2736 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2737 2737
2738 2738 if opts.get(b"dump"):
2739 2739 numrevs = len(r)
2740 2740 ui.write(
2741 2741 (
2742 2742 b"# rev p1rev p2rev start end deltastart base p1 p2"
2743 2743 b" rawsize totalsize compression heads chainlen\n"
2744 2744 )
2745 2745 )
2746 2746 ts = 0
2747 2747 heads = set()
2748 2748
2749 2749 for rev in pycompat.xrange(numrevs):
2750 2750 dbase = r.deltaparent(rev)
2751 2751 if dbase == -1:
2752 2752 dbase = rev
2753 2753 cbase = r.chainbase(rev)
2754 2754 clen = r.chainlen(rev)
2755 2755 p1, p2 = r.parentrevs(rev)
2756 2756 rs = r.rawsize(rev)
2757 2757 ts = ts + rs
2758 2758 heads -= set(r.parentrevs(rev))
2759 2759 heads.add(rev)
2760 2760 try:
2761 2761 compression = ts / r.end(rev)
2762 2762 except ZeroDivisionError:
2763 2763 compression = 0
2764 2764 ui.write(
2765 2765 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2766 2766 b"%11d %5d %8d\n"
2767 2767 % (
2768 2768 rev,
2769 2769 p1,
2770 2770 p2,
2771 2771 r.start(rev),
2772 2772 r.end(rev),
2773 2773 r.start(dbase),
2774 2774 r.start(cbase),
2775 2775 r.start(p1),
2776 2776 r.start(p2),
2777 2777 rs,
2778 2778 ts,
2779 2779 compression,
2780 2780 len(heads),
2781 2781 clen,
2782 2782 )
2783 2783 )
2784 2784 return 0
2785 2785
2786 2786 v = r.version
2787 2787 format = v & 0xFFFF
2788 2788 flags = []
2789 2789 gdelta = False
2790 2790 if v & revlog.FLAG_INLINE_DATA:
2791 2791 flags.append(b'inline')
2792 2792 if v & revlog.FLAG_GENERALDELTA:
2793 2793 gdelta = True
2794 2794 flags.append(b'generaldelta')
2795 2795 if not flags:
2796 2796 flags = [b'(none)']
2797 2797
2798 2798 ### tracks merge vs single parent
2799 2799 nummerges = 0
2800 2800
2801 2801 ### tracks ways the "delta" are build
2802 2802 # nodelta
2803 2803 numempty = 0
2804 2804 numemptytext = 0
2805 2805 numemptydelta = 0
2806 2806 # full file content
2807 2807 numfull = 0
2808 2808 # intermediate snapshot against a prior snapshot
2809 2809 numsemi = 0
2810 2810 # snapshot count per depth
2811 2811 numsnapdepth = collections.defaultdict(lambda: 0)
2812 2812 # delta against previous revision
2813 2813 numprev = 0
2814 2814 # delta against first or second parent (not prev)
2815 2815 nump1 = 0
2816 2816 nump2 = 0
2817 2817 # delta against neither prev nor parents
2818 2818 numother = 0
2819 2819 # delta against prev that are also first or second parent
2820 2820 # (details of `numprev`)
2821 2821 nump1prev = 0
2822 2822 nump2prev = 0
2823 2823
2824 2824 # data about delta chain of each revs
2825 2825 chainlengths = []
2826 2826 chainbases = []
2827 2827 chainspans = []
2828 2828
2829 2829 # data about each revision
2830 2830 datasize = [None, 0, 0]
2831 2831 fullsize = [None, 0, 0]
2832 2832 semisize = [None, 0, 0]
2833 2833 # snapshot count per depth
2834 2834 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2835 2835 deltasize = [None, 0, 0]
2836 2836 chunktypecounts = {}
2837 2837 chunktypesizes = {}
2838 2838
2839 2839 def addsize(size, l):
2840 2840 if l[0] is None or size < l[0]:
2841 2841 l[0] = size
2842 2842 if size > l[1]:
2843 2843 l[1] = size
2844 2844 l[2] += size
2845 2845
2846 2846 numrevs = len(r)
2847 2847 for rev in pycompat.xrange(numrevs):
2848 2848 p1, p2 = r.parentrevs(rev)
2849 2849 delta = r.deltaparent(rev)
2850 2850 if format > 0:
2851 2851 addsize(r.rawsize(rev), datasize)
2852 2852 if p2 != nullrev:
2853 2853 nummerges += 1
2854 2854 size = r.length(rev)
2855 2855 if delta == nullrev:
2856 2856 chainlengths.append(0)
2857 2857 chainbases.append(r.start(rev))
2858 2858 chainspans.append(size)
2859 2859 if size == 0:
2860 2860 numempty += 1
2861 2861 numemptytext += 1
2862 2862 else:
2863 2863 numfull += 1
2864 2864 numsnapdepth[0] += 1
2865 2865 addsize(size, fullsize)
2866 2866 addsize(size, snapsizedepth[0])
2867 2867 else:
2868 2868 chainlengths.append(chainlengths[delta] + 1)
2869 2869 baseaddr = chainbases[delta]
2870 2870 revaddr = r.start(rev)
2871 2871 chainbases.append(baseaddr)
2872 2872 chainspans.append((revaddr - baseaddr) + size)
2873 2873 if size == 0:
2874 2874 numempty += 1
2875 2875 numemptydelta += 1
2876 2876 elif r.issnapshot(rev):
2877 2877 addsize(size, semisize)
2878 2878 numsemi += 1
2879 2879 depth = r.snapshotdepth(rev)
2880 2880 numsnapdepth[depth] += 1
2881 2881 addsize(size, snapsizedepth[depth])
2882 2882 else:
2883 2883 addsize(size, deltasize)
2884 2884 if delta == rev - 1:
2885 2885 numprev += 1
2886 2886 if delta == p1:
2887 2887 nump1prev += 1
2888 2888 elif delta == p2:
2889 2889 nump2prev += 1
2890 2890 elif delta == p1:
2891 2891 nump1 += 1
2892 2892 elif delta == p2:
2893 2893 nump2 += 1
2894 2894 elif delta != nullrev:
2895 2895 numother += 1
2896 2896
2897 2897 # Obtain data on the raw chunks in the revlog.
2898 2898 if util.safehasattr(r, b'_getsegmentforrevs'):
2899 2899 segment = r._getsegmentforrevs(rev, rev)[1]
2900 2900 else:
2901 2901 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2902 2902 if segment:
2903 2903 chunktype = bytes(segment[0:1])
2904 2904 else:
2905 2905 chunktype = b'empty'
2906 2906
2907 2907 if chunktype not in chunktypecounts:
2908 2908 chunktypecounts[chunktype] = 0
2909 2909 chunktypesizes[chunktype] = 0
2910 2910
2911 2911 chunktypecounts[chunktype] += 1
2912 2912 chunktypesizes[chunktype] += size
2913 2913
2914 2914 # Adjust size min value for empty cases
2915 2915 for size in (datasize, fullsize, semisize, deltasize):
2916 2916 if size[0] is None:
2917 2917 size[0] = 0
2918 2918
2919 2919 numdeltas = numrevs - numfull - numempty - numsemi
2920 2920 numoprev = numprev - nump1prev - nump2prev
2921 2921 totalrawsize = datasize[2]
2922 2922 datasize[2] /= numrevs
2923 2923 fulltotal = fullsize[2]
2924 2924 if numfull == 0:
2925 2925 fullsize[2] = 0
2926 2926 else:
2927 2927 fullsize[2] /= numfull
2928 2928 semitotal = semisize[2]
2929 2929 snaptotal = {}
2930 2930 if numsemi > 0:
2931 2931 semisize[2] /= numsemi
2932 2932 for depth in snapsizedepth:
2933 2933 snaptotal[depth] = snapsizedepth[depth][2]
2934 2934 snapsizedepth[depth][2] /= numsnapdepth[depth]
2935 2935
2936 2936 deltatotal = deltasize[2]
2937 2937 if numdeltas > 0:
2938 2938 deltasize[2] /= numdeltas
2939 2939 totalsize = fulltotal + semitotal + deltatotal
2940 2940 avgchainlen = sum(chainlengths) / numrevs
2941 2941 maxchainlen = max(chainlengths)
2942 2942 maxchainspan = max(chainspans)
2943 2943 compratio = 1
2944 2944 if totalsize:
2945 2945 compratio = totalrawsize / totalsize
2946 2946
2947 2947 basedfmtstr = b'%%%dd\n'
2948 2948 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2949 2949
2950 2950 def dfmtstr(max):
2951 2951 return basedfmtstr % len(str(max))
2952 2952
2953 2953 def pcfmtstr(max, padding=0):
2954 2954 return basepcfmtstr % (len(str(max)), b' ' * padding)
2955 2955
2956 2956 def pcfmt(value, total):
2957 2957 if total:
2958 2958 return (value, 100 * float(value) / total)
2959 2959 else:
2960 2960 return value, 100.0
2961 2961
2962 2962 ui.writenoi18n(b'format : %d\n' % format)
2963 2963 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2964 2964
2965 2965 ui.write(b'\n')
2966 2966 fmt = pcfmtstr(totalsize)
2967 2967 fmt2 = dfmtstr(totalsize)
2968 2968 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2969 2969 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2970 2970 ui.writenoi18n(
2971 2971 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2972 2972 )
2973 2973 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2974 2974 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2975 2975 ui.writenoi18n(
2976 2976 b' text : '
2977 2977 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2978 2978 )
2979 2979 ui.writenoi18n(
2980 2980 b' delta : '
2981 2981 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2982 2982 )
2983 2983 ui.writenoi18n(
2984 2984 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2985 2985 )
2986 2986 for depth in sorted(numsnapdepth):
2987 2987 ui.write(
2988 2988 (b' lvl-%-3d : ' % depth)
2989 2989 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2990 2990 )
2991 2991 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2992 2992 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2993 2993 ui.writenoi18n(
2994 2994 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2995 2995 )
2996 2996 for depth in sorted(numsnapdepth):
2997 2997 ui.write(
2998 2998 (b' lvl-%-3d : ' % depth)
2999 2999 + fmt % pcfmt(snaptotal[depth], totalsize)
3000 3000 )
3001 3001 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3002 3002
3003 3003 def fmtchunktype(chunktype):
3004 3004 if chunktype == b'empty':
3005 3005 return b' %s : ' % chunktype
3006 3006 elif chunktype in pycompat.bytestr(string.ascii_letters):
3007 3007 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3008 3008 else:
3009 3009 return b' 0x%s : ' % hex(chunktype)
3010 3010
3011 3011 ui.write(b'\n')
3012 3012 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3013 3013 for chunktype in sorted(chunktypecounts):
3014 3014 ui.write(fmtchunktype(chunktype))
3015 3015 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3016 3016 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3017 3017 for chunktype in sorted(chunktypecounts):
3018 3018 ui.write(fmtchunktype(chunktype))
3019 3019 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3020 3020
3021 3021 ui.write(b'\n')
3022 3022 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3023 3023 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3024 3024 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3025 3025 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3026 3026 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3027 3027
3028 3028 if format > 0:
3029 3029 ui.write(b'\n')
3030 3030 ui.writenoi18n(
3031 3031 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3032 3032 % tuple(datasize)
3033 3033 )
3034 3034 ui.writenoi18n(
3035 3035 b'full revision size (min/max/avg) : %d / %d / %d\n'
3036 3036 % tuple(fullsize)
3037 3037 )
3038 3038 ui.writenoi18n(
3039 3039 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3040 3040 % tuple(semisize)
3041 3041 )
3042 3042 for depth in sorted(snapsizedepth):
3043 3043 if depth == 0:
3044 3044 continue
3045 3045 ui.writenoi18n(
3046 3046 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3047 3047 % ((depth,) + tuple(snapsizedepth[depth]))
3048 3048 )
3049 3049 ui.writenoi18n(
3050 3050 b'delta size (min/max/avg) : %d / %d / %d\n'
3051 3051 % tuple(deltasize)
3052 3052 )
3053 3053
3054 3054 if numdeltas > 0:
3055 3055 ui.write(b'\n')
3056 3056 fmt = pcfmtstr(numdeltas)
3057 3057 fmt2 = pcfmtstr(numdeltas, 4)
3058 3058 ui.writenoi18n(
3059 3059 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3060 3060 )
3061 3061 if numprev > 0:
3062 3062 ui.writenoi18n(
3063 3063 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3064 3064 )
3065 3065 ui.writenoi18n(
3066 3066 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3067 3067 )
3068 3068 ui.writenoi18n(
3069 3069 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3070 3070 )
3071 3071 if gdelta:
3072 3072 ui.writenoi18n(
3073 3073 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3074 3074 )
3075 3075 ui.writenoi18n(
3076 3076 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3077 3077 )
3078 3078 ui.writenoi18n(
3079 3079 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3080 3080 )
3081 3081
3082 3082
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    One line is printed per revision.  Format 0 shows offsets and node
    hashes; format 1 additionally shows flags and parent revision numbers.
    --verbose adds the length/size columns; --debug prints full 40-char
    hashes instead of short ones.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only the two historical index layouts are supported
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hashes with --debug, 12-char short hashes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # print the column headers matching the chosen format/verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # broken index entries still get a line, with null parents
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not hashes
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3197 3197
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # ordered transformation pipeline; each stage consumes the previous
    # stage's tree
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # stages printed unconditionally vs. only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # emit a unified-diff-style listing of the differing revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3329 3329
3330 3330
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional file handle for I/O logging; stays None when neither
    # --logiofd nor --logiofile is given
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # serve_forever() blocks until the client disconnects
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3379 3379
3380 3380
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # Resolve both revisions before taking the lock; an omitted second
    # revision falls back to the null changeset.
    p1node = scmutil.revsingle(repo, rev1).node()
    p2node = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(p1node, p2node)
3398 3398
3399 3399
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the sole positional argument is the revision,
        # not a file path
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap to the underlying revlog when openstorage returned a wrapper
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # print entries sorted by key for stable output
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3426 3426
3427 3427
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # only schemes with a well-known default port are accepted
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # CERT_NONE on purpose: we only want the peer's raw certificate blob,
    # not to validate it here.
    # NOTE(review): ssl.wrap_socket is deprecated in modern Python in
    # favor of SSLContext.wrap_socket — confirm against supported
    # Python versions before changing.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # binary (DER) form of the peer certificate
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first pass only checks; second pass (build=True default) also
        # triggers the Windows chain-building machinery
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3497 3497
3498 3498
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect .hg bundle files under .hg/strip-backup, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` changesets from the bundle, honoring the
        # --newest-first and --no-merges log options
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # nothing to recover if the changeset is already present
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # bundle references a parent we no longer have; skip it
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming-changes machinery while probing the bundle
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # unbundle the first backup that contains the wanted node
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: header with the bundle's mtime, then the
                # bundle path (--verbose) or its changesets
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3634 3634
3635 3635
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Walk the changeset's substate in path order and print the subrepo
    # path followed by its source and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3647 3647
3648 3648
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # shared between successorssets() calls to avoid recomputation
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        succssets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succssets:
            # one indented line per successors set; a pruned changeset
            # (empty set) yields a bare newline
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
3703 3703
3704 3704
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    # read-only pass over the fnodes cache: never compute missing entries
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3714 3714
3715 3715
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # -D KEY=VALUE definitions become extra template properties; the name
    # 'ui' is reserved and empty keys are rejected
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # print the raw parse tree, and the alias-expanded one if different
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # generic template: render once with the -D properties only
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # log template: render once per requested changeset
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3779 3779
3780 3780
@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # The result is used directly as bytes below; the old
    # encoding.strtolocal() conversion was removed as excessive, and the
    # leftover merge artifact (both the old and new branches) is resolved
    # here to the intended final form.
    if r is None:
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)
3795 3793
3796 3794
@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery returns so tests can observe it.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3807 3805
3808 3806
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Acquire the working-copy lock before the store lock (the order the
    # original combined ``with`` took them in).
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3814 3812
3815 3813
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # Thin CLI wrapper: all analysis/upgrade logic lives in the upgrade
    # module. The revlog selectors (--changelog/--manifest) travel through
    # **opts unchanged.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )
3862 3860
3863 3861
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Optionally normalize path separators for display on non-/ platforms.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = lambda fn: util.normpath(fn)
    else:
        display = lambda fn: fn
    # Column widths sized to the longest repo-absolute and relative paths.
    template = b'f %%-%ds %%-%ds %%s' % (
        max(len(abs) for abs in matched),
        max(len(repo.pathto(abs)) for abs in matched),
    )
    for abs in matched:
        flag = b'exact' if matcher.exact(abs) else b''
        line = template % (abs, display(repo.pathto(abs)), flag)
        ui.write(b"%s\n" % line.rstrip())
3890 3888
3891 3889
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node as "<hex> (<phase>)", space-joined,
            # with a trailing space separating it from the reason text.
            rendered = [
                b'%s (%s)' % (node.hex(), node.phasestr()) for node in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3909 3907
3910 3908
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Drop the connection-related options; only command arguments remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only the options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
3938 3936
3939 3937
3940 3938 def _parsewirelangblocks(fh):
3941 3939 activeaction = None
3942 3940 blocklines = []
3943 3941 lastindent = 0
3944 3942
3945 3943 for line in fh:
3946 3944 line = line.rstrip()
3947 3945 if not line:
3948 3946 continue
3949 3947
3950 3948 if line.startswith(b'#'):
3951 3949 continue
3952 3950
3953 3951 if not line.startswith(b' '):
3954 3952 # New block. Flush previous one.
3955 3953 if activeaction:
3956 3954 yield activeaction, blocklines
3957 3955
3958 3956 activeaction = line
3959 3957 blocklines = []
3960 3958 lastindent = 0
3961 3959 continue
3962 3960
3963 3961 # Else we start with an indent.
3964 3962
3965 3963 if not activeaction:
3966 3964 raise error.Abort(_(b'indented line outside of block'))
3967 3965
3968 3966 indent = len(line) - len(line.lstrip())
3969 3967
3970 3968 # If this line is indented more than the last line, concatenate it.
3971 3969 if indent > lastindent and blocklines:
3972 3970 blocklines[-1] += line.lstrip()
3973 3971 else:
3974 3972 blocklines.append(line)
3975 3973 lastindent = indent
3976 3974
3977 3975 # Flush last block.
3978 3976 if activeaction:
3979 3977 yield activeaction, blocklines
3980 3978
3981 3979
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

       command listkeys
           namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fix: split() returns a list; the filename is the part
                    # after the ``BODYFILE `` keyword. Passing the whole list
                    # to open() raised TypeError before.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            # Explicit frames override any BODYFILE content.
            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now