##// END OF EJS Templates
dirstate-item: use the `state` property in debugpathcomplete...
marmoute -
r48333:5363610f default
parent child Browse files
Show More
@@ -1,4827 +1,4827
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 revlog,
74 74 revset,
75 75 revsetlang,
76 76 scmutil,
77 77 setdiscovery,
78 78 simplemerge,
79 79 sshpeer,
80 80 sslutil,
81 81 streamclone,
82 82 strip,
83 83 tags as tagsmod,
84 84 templater,
85 85 treediscovery,
86 86 upgrade,
87 87 url as urlmod,
88 88 util,
89 89 vfs as vfsmod,
90 90 wireprotoframing,
91 91 wireprotoserver,
92 92 wireprotov2peer,
93 93 )
94 94 from .interfaces import repository
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 deltas as deltautil,
106 106 nodemap,
107 107 sidedata,
108 108 )
109 109
# Convenience alias so callers of this module can release locks without
# importing lockmod themselves.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the commands
# registered by the strip extension so they are exposed here as well.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
115 115
116 116
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        indexfile, rev1, rev2 = args
        store = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), indexfile
        )
        tonode = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        tonode = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = store.ancestor(tonode(rev1), tonode(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
136 136
137 137
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial; a native str here would
    # raise a TypeError on Python 3 when joined with the bytes base path.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
153 153
154 154
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the path (local file or URL), parse the bundle header, then
    # replay the stream into the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
161 161
162 162
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # The DAG is replayed from revision 0, so the target repo must be empty.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, used for progress)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    # ":tag" entries collected here are written to .hg/localtags at the end.
    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1 before the first one)
        atbranch = b'default'
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # a node event: data is (id, list-of-parent-ids)
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # maintain a single file "mf" whose content is a 3-way
                    # merge of its parents, so merges stay conflict-free
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # one file "of" rewritten entirely by every revision
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # a fresh file "nf<id>" per revision; merges also carry
                    # over the "nf*" files from the second parent
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # callback supplying the content of each file in the
                    # in-memory commit; None means "file absent"
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate DAG parent ids into parent nodeids (None = null)
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # a local-tag event for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # a branch-switch event for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
338 338
339 339
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """display the contents of changegroup ``gen``

    With ``all`` set, every delta of each chunk group (changelog, manifest,
    filelogs) is listed; otherwise only the changelog node ids are printed.
    ``indent`` prefixes every output line (used when nested inside bundle2
    output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump one chunk group under the heading ``named``
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter() with a {} sentinel: filelogheader() returns an empty dict
        # once there are no more filelog chunk groups
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
379 379
380 380
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display the version and obsolescence markers contained in ``part``"""
    opts = pycompat.byteskwargs(opts)
    payload = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(payload)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(payload))
        )
    else:
        ui.write(
            b"%sversion: %d (%d bytes)\n" % (prefix, version, len(payload))
        )
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
403 403
404 404
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in ``data``, one per line"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
413 413
414 414
def _quasirepr(thing):
    """return a repr-like bytes rendering with deterministic dict ordering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
421 421
422 422
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when requested
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
445 445
446 446
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only print the bundlespec, nothing else
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
469 469
470 470
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
490 490
491 491
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)

    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read pre-computed information from the sidedata storage, if any
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # first matching category wins; "touched" is the catch-all
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
541 541
542 542
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errors = 0
    for filename in repo.dirstate:
        state = repo.dirstate[filename]
        # tracked (normal/removed) entries must exist in the first manifest
        if state in b"nr" and filename not in m1:
            ui.warn(
                _(b"%s in state %s, but not in manifest1\n")
                % (filename, state)
            )
            errors += 1
        # added entries must NOT already exist in the first manifest
        if state in b"a" and filename in m1:
            ui.warn(
                _(b"%s in state %s, but also in manifest1\n")
                % (filename, state)
            )
            errors += 1
        # merged entries must come from at least one parent manifest
        if state in b"m" and filename not in m1 and filename not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (filename, state)
            )
            errors += 1
    # conversely, everything in manifest1 must be tracked by the dirstate
    for filename in m1:
        state = repo.dirstate[filename]
        if state not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s")
                % (filename, state)
            )
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
571 571
572 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
585 585
586 586
def _debugdisplaycolor(ui):
    """list every known color label, each rendered with its own effect"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[len(b'color.'):]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # labels containing '_' sort last, grouping '_background' entries
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603 603
604 604
def _debugdisplaystyle(ui):
    """list each configured style next to its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every row so the effect columns line up
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
618 618
619 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    requires = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % requires)
641 641
642 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # operate on a standalone revlog index file
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) node events, plus ('l', (rev,
            # label)) for revisions explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged revision to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # field 5 of a changelog entry is the "extra" mapping,
                    # which records the branch name
                    newb = cl.read(cl.node(r))[5][b'branch']
                    # emit a branch annotation only when the branch changes
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
712 712
713 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wholestore = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if wholestore:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        # with -c/-m/--dir the positional argument is actually the revision
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729 729
730 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    parsed = (
        dateutil.parsedate(date, dateutil.extendeddateformats)
        if opts["extended"]
        else dateutil.parsedate(date)
    )
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
749 749
750 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used below: e[1] = compressed size,
        # e[2] = uncompressed size, e[3] = delta base revision,
        # e[5]/e[6] = parent revisions
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify the delta by which revision it was based on
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without general delta, a revision is either a full snapshot
            # (base == itself) or a delta against the previous revision
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        # chain size = sum of the compressed sizes of every link
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # map chain base rev -> sequential chain id, assigned on first sight
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: sum the block sizes that would be
            # fetched from disk to reconstruct this chain
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
931 931
932 932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (b'', b'dirs', False, _(b'display directories')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated but still honored; it overrides --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    entries = list(pycompat.iteritems(repo.dirstate))
    if opts['dirs']:
        entries.extend(repo.dirstate.directories())
    entries.sort(key=keyfunc)
    for file_, ent in entries:
        # each entry is a (state, mode, size, mtime) tuple
        if ent[3] == -1:
            # -1 marks an unset mtime
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # the 0o20000 bit is set in the recorded st_mode for symlinks
            mode = b'lnk'
        else:
            # strip setuid/setgid-style bits and the process umask
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
981 981
982 982
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real peer resolved from the given URL/path
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself as the "remote", restricted to the
        # requested revisions through a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # similarly restrict the local side when requested
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` is filled as a side effect of discovery through the `audit`
    # argument (e.g. the b'total-roundtrips' key displayed below)
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            # attribute names must be native str on Python 3; a bytes name
            # would make getattr() (inside safehasattr) raise TypeError
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # `_any` is unused; the name avoids shadowing the builtin `any`
            common, _any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # buffer all command output when a machine readable format (e.g.
        # json) is requested, so stray writes cannot corrupt it
        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    # `all_revs` avoids shadowing the builtin `all`
    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1236 1236
1237 1237
# read/write chunk size (4 KiB) used by `hg debugdownload` below
_chunksize = 4 << 10
1239 1239
1240 1240
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through ``urlmod.open`` (honoring Mercurial's
    proxy/auth configuration) and streamed in ``_chunksize`` pieces either to
    ``ui`` or, with --output, to the given file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always release the response handle; previously it was leaked
        try:
            fh.close()
        finally:
            if output:
                dest.close()
1263 1263
1264 1264
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; under an oxidized binary modules
        # have no __file__, so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # in default mode (neither quiet nor verbose) the name is followed
        # on the same line by a compatibility note
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1326 1326
1327 1327
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    # defaults to the working directory when no revision is given
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # successive transformation stages of the expression tree; each one can
    # be dumped with --show-stage NAME (or 'all')
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # with bare --verbose the implicit 'parsed' dump has no header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # candidate file names the matcher will be evaluated against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1423 1423
1424 1424
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each variant name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # attribute names must be native str on Python 3: passing
            # b'startswith' to getattr() (inside safehasattr) raised
            # TypeError instead of detecting string-valued variants
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # highlight variants whose repo value disagrees with the current
        # config or with the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1495 1495
1496 1496
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # equivalent to the historical "flag and b'yes' or b'no'" idiom
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file in the target directory;
    # report '(unknown)' when the file cannot be created
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1519 1519
1520 1520
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the getbundle() keyword arguments from the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name to an on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1567 1567
1568 1568
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file is ignored either directly or because one of its
                # parent directories matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1617 1617
1618 1618
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full node ids with --debug, short ones otherwise
    shortfn = hex if ui.debugflag else short

    # derive the id column width from the first entry (12 for empty stores)
    idlen = 12
    for first in store:
        idlen = len(shortfn(store.node(first)))
        break

    fm = ui.formatter(b'debugindex', opts)
    header = b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1658 1658
1659 1659
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # one edge per parent; the second parent is omitted when null
        parents = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1678 1678
1679 1679
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # attribute names must be native str on Python 3: passing b'stats' to
    # getattr() (inside safehasattr) raised TypeError instead of returning
    # False for the pure-Python index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1689 1689
1690 1690
1691 1691 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1692 1692 def debuginstall(ui, **opts):
1693 1693 """test Mercurial installation
1694 1694
1695 1695 Returns 0 on success.
1696 1696 """
1697 1697 opts = pycompat.byteskwargs(opts)
1698 1698
1699 1699 problems = 0
1700 1700
1701 1701 fm = ui.formatter(b'debuginstall', opts)
1702 1702 fm.startitem()
1703 1703
1704 1704 # encoding might be unknown or wrong. don't translate these messages.
1705 1705 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1706 1706 err = None
1707 1707 try:
1708 1708 codecs.lookup(pycompat.sysstr(encoding.encoding))
1709 1709 except LookupError as inst:
1710 1710 err = stringutil.forcebytestr(inst)
1711 1711 problems += 1
1712 1712 fm.condwrite(
1713 1713 err,
1714 1714 b'encodingerror',
1715 1715 b" %s\n (check that your locale is properly set)\n",
1716 1716 err,
1717 1717 )
1718 1718
1719 1719 # Python
1720 1720 pythonlib = None
1721 1721 if util.safehasattr(os, '__file__'):
1722 1722 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1723 1723 elif getattr(sys, 'oxidized', False):
1724 1724 pythonlib = pycompat.sysexecutable
1725 1725
1726 1726 fm.write(
1727 1727 b'pythonexe',
1728 1728 _(b"checking Python executable (%s)\n"),
1729 1729 pycompat.sysexecutable or _(b"unknown"),
1730 1730 )
1731 1731 fm.write(
1732 1732 b'pythonimplementation',
1733 1733 _(b"checking Python implementation (%s)\n"),
1734 1734 pycompat.sysbytes(platform.python_implementation()),
1735 1735 )
1736 1736 fm.write(
1737 1737 b'pythonver',
1738 1738 _(b"checking Python version (%s)\n"),
1739 1739 (b"%d.%d.%d" % sys.version_info[:3]),
1740 1740 )
1741 1741 fm.write(
1742 1742 b'pythonlib',
1743 1743 _(b"checking Python lib (%s)...\n"),
1744 1744 pythonlib or _(b"unknown"),
1745 1745 )
1746 1746
1747 1747 try:
1748 1748 from . import rustext # pytype: disable=import-error
1749 1749
1750 1750 rustext.__doc__ # trigger lazy import
1751 1751 except ImportError:
1752 1752 rustext = None
1753 1753
1754 1754 security = set(sslutil.supportedprotocols)
1755 1755 if sslutil.hassni:
1756 1756 security.add(b'sni')
1757 1757
1758 1758 fm.write(
1759 1759 b'pythonsecurity',
1760 1760 _(b"checking Python security support (%s)\n"),
1761 1761 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1762 1762 )
1763 1763
1764 1764 # These are warnings, not errors. So don't increment problem count. This
1765 1765 # may change in the future.
1766 1766 if b'tls1.2' not in security:
1767 1767 fm.plain(
1768 1768 _(
1769 1769 b' TLS 1.2 not supported by Python install; '
1770 1770 b'network connections lack modern security\n'
1771 1771 )
1772 1772 )
1773 1773 if b'sni' not in security:
1774 1774 fm.plain(
1775 1775 _(
1776 1776 b' SNI not supported by Python install; may have '
1777 1777 b'connectivity issues with some servers\n'
1778 1778 )
1779 1779 )
1780 1780
1781 1781 fm.plain(
1782 1782 _(
1783 1783 b"checking Rust extensions (%s)\n"
1784 1784 % (b'missing' if rustext is None else b'installed')
1785 1785 ),
1786 1786 )
1787 1787
1788 1788 # TODO print CA cert info
1789 1789
1790 1790 # hg version
1791 1791 hgver = util.version()
1792 1792 fm.write(
1793 1793 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1794 1794 )
1795 1795 fm.write(
1796 1796 b'hgverextra',
1797 1797 _(b"checking Mercurial custom build (%s)\n"),
1798 1798 b'+'.join(hgver.split(b'+')[1:]),
1799 1799 )
1800 1800
1801 1801 # compiled modules
1802 1802 hgmodules = None
1803 1803 if util.safehasattr(sys.modules[__name__], '__file__'):
1804 1804 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1805 1805 elif getattr(sys, 'oxidized', False):
1806 1806 hgmodules = pycompat.sysexecutable
1807 1807
1808 1808 fm.write(
1809 1809 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1810 1810 )
1811 1811 fm.write(
1812 1812 b'hgmodules',
1813 1813 _(b"checking installed modules (%s)...\n"),
1814 1814 hgmodules or _(b"unknown"),
1815 1815 )
1816 1816
1817 1817 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1818 1818 rustext = rustandc # for now, that's the only case
1819 1819 cext = policy.policy in (b'c', b'allow') or rustandc
1820 1820 nopure = cext or rustext
1821 1821 if nopure:
1822 1822 err = None
1823 1823 try:
1824 1824 if cext:
1825 1825 from .cext import ( # pytype: disable=import-error
1826 1826 base85,
1827 1827 bdiff,
1828 1828 mpatch,
1829 1829 osutil,
1830 1830 )
1831 1831
1832 1832 # quiet pyflakes
1833 1833 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1834 1834 if rustext:
1835 1835 from .rustext import ( # pytype: disable=import-error
1836 1836 ancestor,
1837 1837 dirstate,
1838 1838 )
1839 1839
1840 1840 dir(ancestor), dir(dirstate) # quiet pyflakes
1841 1841 except Exception as inst:
1842 1842 err = stringutil.forcebytestr(inst)
1843 1843 problems += 1
1844 1844 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1845 1845
1846 1846 compengines = util.compengines._engines.values()
1847 1847 fm.write(
1848 1848 b'compengines',
1849 1849 _(b'checking registered compression engines (%s)\n'),
1850 1850 fm.formatlist(
1851 1851 sorted(e.name() for e in compengines),
1852 1852 name=b'compengine',
1853 1853 fmt=b'%s',
1854 1854 sep=b', ',
1855 1855 ),
1856 1856 )
1857 1857 fm.write(
1858 1858 b'compenginesavail',
1859 1859 _(b'checking available compression engines (%s)\n'),
1860 1860 fm.formatlist(
1861 1861 sorted(e.name() for e in compengines if e.available()),
1862 1862 name=b'compengine',
1863 1863 fmt=b'%s',
1864 1864 sep=b', ',
1865 1865 ),
1866 1866 )
1867 1867 wirecompengines = compression.compengines.supportedwireengines(
1868 1868 compression.SERVERROLE
1869 1869 )
1870 1870 fm.write(
1871 1871 b'compenginesserver',
1872 1872 _(
1873 1873 b'checking available compression engines '
1874 1874 b'for wire protocol (%s)\n'
1875 1875 ),
1876 1876 fm.formatlist(
1877 1877 [e.name() for e in wirecompengines if e.wireprotosupport()],
1878 1878 name=b'compengine',
1879 1879 fmt=b'%s',
1880 1880 sep=b', ',
1881 1881 ),
1882 1882 )
1883 1883 re2 = b'missing'
1884 1884 if util._re2:
1885 1885 re2 = b'available'
1886 1886 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1887 1887 fm.data(re2=bool(util._re2))
1888 1888
1889 1889 # templates
1890 1890 p = templater.templatedir()
1891 1891 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1892 1892 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1893 1893 if p:
1894 1894 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1895 1895 if m:
1896 1896 # template found, check if it is working
1897 1897 err = None
1898 1898 try:
1899 1899 templater.templater.frommapfile(m)
1900 1900 except Exception as inst:
1901 1901 err = stringutil.forcebytestr(inst)
1902 1902 p = None
1903 1903 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1904 1904 else:
1905 1905 p = None
1906 1906 fm.condwrite(
1907 1907 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1908 1908 )
1909 1909 fm.condwrite(
1910 1910 not m,
1911 1911 b'defaulttemplatenotfound',
1912 1912 _(b" template '%s' not found\n"),
1913 1913 b"default",
1914 1914 )
1915 1915 if not p:
1916 1916 problems += 1
1917 1917 fm.condwrite(
1918 1918 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1919 1919 )
1920 1920
1921 1921 # editor
1922 1922 editor = ui.geteditor()
1923 1923 editor = util.expandpath(editor)
1924 1924 editorbin = procutil.shellsplit(editor)[0]
1925 1925 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1926 1926 cmdpath = procutil.findexe(editorbin)
1927 1927 fm.condwrite(
1928 1928 not cmdpath and editor == b'vi',
1929 1929 b'vinotfound',
1930 1930 _(
1931 1931 b" No commit editor set and can't find %s in PATH\n"
1932 1932 b" (specify a commit editor in your configuration"
1933 1933 b" file)\n"
1934 1934 ),
1935 1935 not cmdpath and editor == b'vi' and editorbin,
1936 1936 )
1937 1937 fm.condwrite(
1938 1938 not cmdpath and editor != b'vi',
1939 1939 b'editornotfound',
1940 1940 _(
1941 1941 b" Can't find editor '%s' in PATH\n"
1942 1942 b" (specify a commit editor in your configuration"
1943 1943 b" file)\n"
1944 1944 ),
1945 1945 not cmdpath and editorbin,
1946 1946 )
1947 1947 if not cmdpath and editor != b'vi':
1948 1948 problems += 1
1949 1949
1950 1950 # check username
1951 1951 username = None
1952 1952 err = None
1953 1953 try:
1954 1954 username = ui.username()
1955 1955 except error.Abort as e:
1956 1956 err = e.message
1957 1957 problems += 1
1958 1958
1959 1959 fm.condwrite(
1960 1960 username, b'username', _(b"checking username (%s)\n"), username
1961 1961 )
1962 1962 fm.condwrite(
1963 1963 err,
1964 1964 b'usernameerror',
1965 1965 _(
1966 1966 b"checking username...\n %s\n"
1967 1967 b" (specify a username in your configuration file)\n"
1968 1968 ),
1969 1969 err,
1970 1970 )
1971 1971
1972 1972 for name, mod in extensions.extensions():
1973 1973 handler = getattr(mod, 'debuginstall', None)
1974 1974 if handler is not None:
1975 1975 problems += handler(ui, fm)
1976 1976
1977 1977 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1978 1978 if not problems:
1979 1979 fm.data(problems=problems)
1980 1980 fm.condwrite(
1981 1981 problems,
1982 1982 b'problems',
1983 1983 _(b"%d problems detected, please check your install!\n"),
1984 1984 problems,
1985 1985 )
1986 1986 fm.end()
1987 1987
1988 1988 return problems
1989 1989
1990 1990
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Query all ids in one round trip, then render one digit per node.
    results = peer.known([bin(nodeid) for nodeid in ids])
    digits = [b"1" if known else b"0" for known in results]
    ui.write(b"%s\n" % b"".join(digits))
2004 2004
2005 2005
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so ancient completion scripts keep working; delegate
    # entirely to the modern name-completion command.
    return debugnamecomplete(ui, repo, *args)
2010 2010
2011 2011
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forcibly delete the lock files without checking who holds them --
    # this is why the corresponding options are documented as DANGEROUS.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user confirms (or the
            # process is interrupted); the finally clause releases them.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Report the state of one lock file; return 1 if held, else 0.

        ``method`` is the repository's lock-acquisition callable; probing
        it (and releasing immediately) also reaps stale lock files.
        """
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT just means the lock file does not exist (free).
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2123 2123
2124 2124
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        """Return the manifest fulltext cache, aborting when the current
        revlog implementation does not expose one."""
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read() # stores revision in cache too
            return

    # No --clear and no --add: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2198 2198
2199 2199
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # With --verbose, report which on-disk merge state format (v1/v2) is
    # present and which one will actually be used.
    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # Default plain-text rendering; users may override with --template.
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields emitted depend on the record kind.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2307 2307
2308 2308
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branches get special treatment: historically only *open* branches
    # were listed, so skip the generic namespace and add them explicitly.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # No arguments means "complete everything" (empty prefix).
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2331 2331
2332 2332
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the index's own serializer when available (e.g. a native
        # implementation); fall back to the generic one otherwise.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # Guard against a zero-length data file: the percentage
            # computation previously raised ZeroDivisionError in that case.
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2394 2394
2395 2395
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """Parse a full-length hex node id, raising InputError otherwise."""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: remove the markers designated by their indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Creation mode: record a new marker precursor -> successors.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2545 2545
2546 2546
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # One "src -> dst" line per recorded copy relative to the first parent.
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2559 2559
2560 2560
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # This function was previously (mis)named ``debugp1copies``, which
    # shadowed the real ``debugp1copies`` defined just above at module
    # level; the registered command name (b'debugp2copies') is unchanged.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2573 2573
2574 2574
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        """Return (files, dirs) completing ``path`` among dirstate entries
        whose state character is in ``acceptable``."""
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Paths outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/', so normalize the spec on Windows.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            # Use the dirstate item's `state` property (the old diff kept a
            # duplicated `st[0]` line here, which was a syntax error).
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No filter option at all means "accept every state".
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2643 2643
2644 2644
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    # Print copies sorted by destination path, one "src -> dst" per line.
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2658 2658
2659 2659
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here; the log lines only
    # show up when --debug is in effect.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        peer.close()
2683 2683
2684 2684
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool overrides everything else, mirrored via ui.forcemerge.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v, surface the inputs that can short-circuit tool selection.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress tool-selection chatter unless --debug is in effect.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2769 2769
2770 2770
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            allkeys = peer.listkeys(namespace)
            for k, v in sorted(pycompat.iteritems(allkeys)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: conditionally set key from old to new.
            key, old, new = keyinfo
            args = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with peer.commandexecutor() as executor:
                result = executor.callcommand(b'pushkey', args).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            return not result
    finally:
        peer.close()
2806 2806
2807 2807
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs of two revisions and report their depths, delta,
    # hamming distance and relationship (=, >, <, or | for "unrelated").
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously `rel` was left unbound when none of the comparisons
        # matched, raising NameError at the final write below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2834 2834
2835 2835
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Files present only in the manifest, plus dirstate-only files
            # that are not marked as added, are the inconsistent ones.
            manifestonly = in_manifest - in_dirstate
            dsnotadded = {
                f for f in in_dirstate - in_manifest if dirstate[f] != b'a'
            }
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2883 2883
2884 2884
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual rebuild logic lives in the repair module.
    repair.rebuildfncache(ui, repo)
2889 2889
2890 2890
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # Walk every matched file and report its rename source, if any.
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            src, srcnode = renamed
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, src, hex(srcnode)))
2910 2910
2911 2911
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for deterministic output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2917 2917
2918 2918
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one raw line per revision with layout,
        # delta-chain and size information, then exit.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the running head set: parents stop being heads once
            # a child revision is seen.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: decode the revlog version/flags header first.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    # each size accumulator is a [min, max, total] triple (see addsize)
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # no delta parent: full snapshot (or empty text)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta-based revision: extend the parent's chain bookkeeping
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # classify the delta base: previous rev, p1, p2 or other
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # the first byte of a chunk encodes its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the running totals into averages for display
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string templates; width is filled in by dfmtstr/pcfmtstr below
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # integer format sized to the widest expected value
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # integer-plus-percentage format sized to the widest expected value
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for use with pcfmtstr
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a chunk-type byte
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3273 3273
3274 3274
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only index formats 0 and 1 are supported by this command
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full hex node ids, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the first node id to size the output columns
        idlen = len(shortfn(r.node(i)))
        break

    # Header line; format 0 and format 1 have different column sets, and
    # --verbose adds offset/length (and size for format 1) columns.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One output row per revision, matching the header chosen above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not node ids
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3388 3388
3389 3389
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline: each stage transforms the tree
    # produced by the previous one, in this order.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages get printed: `showalways` unconditionally, `showchanged`
    # only when the tree actually changed from the last printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, recording the tree at each stage and printing the
    # stages requested above.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Present the mismatch as a unified-diff-style listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the result.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3521 3521
3522 3522
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    # Resolve the optional I/O log destination (fd or file path).
    logfh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3571 3571
3572 3572
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision when not provided.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3600 3600
3601 3601
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied and the sole positional
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap to the underlying revlog when the storage object wraps one
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3628 3628
3629 3629
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    # The chain-building machinery below relies on the win32 module; bail
    # out early everywhere else.
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # fall back to the repository's 'default' path
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Derive the (host, port) pair; only https and ssh schemes are handled.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled here: we only want
    # the peer's raw certificate, not a validated connection.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # fetch the peer certificate in DER (binary) form
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass: check only, without triggering chain building.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second pass with building enabled (presumably may hit
            # Windows Update — behavior lives in the win32 module).
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3701 3701
3702 3702
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip backup bundle under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Force incoming-style probing: read directly from the bundle files.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from `chlist`, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we no longer have;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress the usual incoming chatter while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: unbundle the first backup containing the node,
                # then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the backup's mtime header (plus the
                # bundle path with --verbose), then its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # Non-verbose: compact one-line-per-changeset template.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3843 3843
3844 3844
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the recorded subrepository state (path, source, revision) of
    # the requested revision, one subrepo per stanza, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3856 3856
3857 3857
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Deferred import: only needed when the command actually runs.
    import code

    # `repo` is None when invoked outside a repository (optionalrepo).
    code.interact(local={'ui': ui, 'repo': repo})
3873 3873
3874 3874
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # One cache dict is shared across all successorssets() calls so work
    # done for one revision benefits the next.
    cache = {}
    closest = opts['closest']
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for sset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            # Each successors set becomes one indented line of short nodes;
            # an empty set still produces a (blank) line.
            if sset:
                ui.write(b' ' + b' '.join(short(n) for n in sset))
            ui.write(b'\n')
3929 3929
3930 3930
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: report what is cached, never recompute.
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            # Flag cached filenodes that no longer exist in the filelog.
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3949 3949
3950 3950
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r needs a repository to resolve revisions; optionalrepo means
        # we may have been invoked without one.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # 'ui' is reserved for the internal ui resource.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed template tree, plus the alias-expanded tree
        # when expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render a single time with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4014 4014
4015 4015
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can return None; substitute a marker so the echoed
    # line below is always well-formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4030 4030
4031 4031
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed at the plain (non-password) prompt.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4044 4044
4045 4045
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy and store locks: warming CACHES_ALL may
    # write cache files on both sides.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4051 4051
4052 4052
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All of the real work lives in the upgrade module; de-duplicate the
    # repeatable --optimize values and forward everything else as-is.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4102 4102
4103 4103
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display transform: honor ui.slash on platforms whose native path
    # separator is not '/'.  (Previously a rebound lambda named `f`.)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths come from the longest repo-relative and cwd-relative
    # paths so the listing lines up.  Use generator expressions instead of
    # materializing throwaway lists, and avoid shadowing the builtin `abs`
    # as the loop variable.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            m.exact(path) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4130 4130
4131 4131
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render the divergent changesets, if any, as "hex (phase)" pairs
        # with a trailing separator space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4149 4149
4150 4150
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Echo arguments off a remote peer to exercise wire argument passing.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; forward only the options that
        # were actually set.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4181 4181
4182 4182
4183 4183 def _parsewirelangblocks(fh):
4184 4184 activeaction = None
4185 4185 blocklines = []
4186 4186 lastindent = 0
4187 4187
4188 4188 for line in fh:
4189 4189 line = line.rstrip()
4190 4190 if not line:
4191 4191 continue
4192 4192
4193 4193 if line.startswith(b'#'):
4194 4194 continue
4195 4195
4196 4196 if not line.startswith(b' '):
4197 4197 # New block. Flush previous one.
4198 4198 if activeaction:
4199 4199 yield activeaction, blocklines
4200 4200
4201 4201 activeaction = line
4202 4202 blocklines = []
4203 4203 lastindent = 0
4204 4204 continue
4205 4205
4206 4206 # Else we start with an indent.
4207 4207
4208 4208 if not activeaction:
4209 4209 raise error.Abort(_(b'indented line outside of block'))
4210 4210
4211 4211 indent = len(line) - len(line.lstrip())
4212 4212
4213 4213 # If this line is indented more than the last line, concatenate it.
4214 4214 if indent > lastindent and blocklines:
4215 4215 blocklines[-1] += line.lstrip()
4216 4216 else:
4217 4217 blocklines.append(line)
4218 4218 lastindent = indent
4219 4219
4220 4220 # Flush last block.
4221 4221 if activeaction:
4222 4222 yield activeaction, blocklines
4223 4223
4224 4224
4225 4225 @command(
4226 4226 b'debugwireproto',
4227 4227 [
4228 4228 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4229 4229 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4230 4230 (
4231 4231 b'',
4232 4232 b'noreadstderr',
4233 4233 False,
4234 4234 _(b'do not read from stderr of the remote'),
4235 4235 ),
4236 4236 (
4237 4237 b'',
4238 4238 b'nologhandshake',
4239 4239 False,
4240 4240 _(b'do not log I/O related to the peer handshake'),
4241 4241 ),
4242 4242 ]
4243 4243 + cmdutil.remoteopts,
4244 4244 _(b'[PATH]'),
4245 4245 optionalrepo=True,
4246 4246 )
4247 4247 def debugwireproto(ui, repo, path=None, **opts):
4248 4248 """send wire protocol commands to a server
4249 4249
4250 4250 This command can be used to issue wire protocol commands to remote
4251 4251 peers and to debug the raw data being exchanged.
4252 4252
4253 4253 ``--localssh`` will start an SSH server against the current repository
4254 4254 and connect to that. By default, the connection will perform a handshake
4255 4255 and establish an appropriate peer instance.
4256 4256
4257 4257 ``--peer`` can be used to bypass the handshake protocol and construct a
4258 4258 peer instance using the specified class type. Valid values are ``raw``,
4259 4259 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4260 4260 raw data payloads and don't support higher-level command actions.
4261 4261
4262 4262 ``--noreadstderr`` can be used to disable automatic reading from stderr
4263 4263 of the peer (for SSH connections only). Disabling automatic reading of
4264 4264 stderr is useful for making output more deterministic.
4265 4265
4266 4266 Commands are issued via a mini language which is specified via stdin.
4267 4267 The language consists of individual actions to perform. An action is
4268 4268 defined by a block. A block is defined as a line with no leading
4269 4269 space followed by 0 or more lines with leading space. Blocks are
4270 4270 effectively a high-level command with additional metadata.
4271 4271
4272 4272 Lines beginning with ``#`` are ignored.
4273 4273
4274 4274 The following sections denote available actions.
4275 4275
4276 4276 raw
4277 4277 ---
4278 4278
4279 4279 Send raw data to the server.
4280 4280
4281 4281 The block payload contains the raw data to send as one atomic send
4282 4282 operation. The data may not actually be delivered in a single system
4283 4283 call: it depends on the abilities of the transport being used.
4284 4284
4285 4285 Each line in the block is de-indented and concatenated. Then, that
4286 4286 value is evaluated as a Python b'' literal. This allows the use of
4287 4287 backslash escaping, etc.
4288 4288
4289 4289 raw+
4290 4290 ----
4291 4291
4292 4292 Behaves like ``raw`` except flushes output afterwards.
4293 4293
4294 4294 command <X>
4295 4295 -----------
4296 4296
4297 4297 Send a request to run a named command, whose name follows the ``command``
4298 4298 string.
4299 4299
4300 4300 Arguments to the command are defined as lines in this block. The format of
4301 4301 each line is ``<key> <value>``. e.g.::
4302 4302
4303 4303 command listkeys
4304 4304 namespace bookmarks
4305 4305
4306 4306 If the value begins with ``eval:``, it will be interpreted as a Python
4307 4307 literal expression. Otherwise values are interpreted as Python b'' literals.
4308 4308 This allows sending complex types and encoding special byte sequences via
4309 4309 backslash escaping.
4310 4310
4311 4311 The following arguments have special meaning:
4312 4312
4313 4313 ``PUSHFILE``
4314 4314 When defined, the *push* mechanism of the peer will be used instead
4315 4315 of the static request-response mechanism and the content of the
4316 4316 file specified in the value of this argument will be sent as the
4317 4317 command payload.
4318 4318
4319 4319 This can be used to submit a local bundle file to the remote.
4320 4320
4321 4321 batchbegin
4322 4322 ----------
4323 4323
4324 4324 Instruct the peer to begin a batched send.
4325 4325
4326 4326 All ``command`` blocks are queued for execution until the next
4327 4327 ``batchsubmit`` block.
4328 4328
4329 4329 batchsubmit
4330 4330 -----------
4331 4331
4332 4332 Submit previously queued ``command`` blocks as a batch request.
4333 4333
4334 4334 This action MUST be paired with a ``batchbegin`` action.
4335 4335
4336 4336 httprequest <method> <path>
4337 4337 ---------------------------
4338 4338
4339 4339 (HTTP peer only)
4340 4340
4341 4341 Send an HTTP request to the peer.
4342 4342
4343 4343 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4344 4344
4345 4345 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4346 4346 headers to add to the request. e.g. ``Accept: foo``.
4347 4347
4348 4348 The following arguments are special:
4349 4349
4350 4350 ``BODYFILE``
4351 4351 The content of the file defined as the value to this argument will be
4352 4352 transferred verbatim as the HTTP request body.
4353 4353
4354 4354 ``frame <type> <flags> <payload>``
4355 4355 Send a unified protocol frame as part of the request body.
4356 4356
4357 4357 All frames will be collected and sent as the body to the HTTP
4358 4358 request.
4359 4359
4360 4360 close
4361 4361 -----
4362 4362
4363 4363 Close the connection to the server.
4364 4364
4365 4365 flush
4366 4366 -----
4367 4367
4368 4368 Flush data written to the server.
4369 4369
4370 4370 readavailable
4371 4371 -------------
4372 4372
4373 4373 Close the write end of the connection and read all available data from
4374 4374 the server.
4375 4375
4376 4376 If the connection to the server encompasses multiple pipes, we poll both
4377 4377 pipes and read available data.
4378 4378
4379 4379 readline
4380 4380 --------
4381 4381
4382 4382 Read a line of output from the server. If there are multiple output
4383 4383 pipes, reads only the main pipe.
4384 4384
4385 4385 ereadline
4386 4386 ---------
4387 4387
4388 4388 Like ``readline``, but read from the stderr pipe, if available.
4389 4389
4390 4390 read <X>
4391 4391 --------
4392 4392
4393 4393 ``read()`` N bytes from the server's main output pipe.
4394 4394
4395 4395 eread <X>
4396 4396 ---------
4397 4397
4398 4398 ``read()`` N bytes from the server's stderr pipe, if available.
4399 4399
4400 4400 Specifying Unified Frame-Based Protocol Frames
4401 4401 ----------------------------------------------
4402 4402
4403 4403 It is possible to emit a *Unified Frame-Based Protocol* by using special
4404 4404 syntax.
4405 4405
4406 4406 A frame is composed as a type, flags, and payload. These can be parsed
4407 4407 from a string of the form:
4408 4408
4409 4409 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4410 4410
4411 4411 ``request-id`` and ``stream-id`` are integers defining the request and
4412 4412 stream identifiers.
4413 4413
4414 4414 ``type`` can be an integer value for the frame type or the string name
4415 4415 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4416 4416 ``command-name``.
4417 4417
4418 4418 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4419 4419 components. Each component (and there can be just one) can be an integer
4420 4420 or a flag name for stream flags or frame flags, respectively. Values are
4421 4421 resolved to integers and then bitwise OR'd together.
4422 4422
4423 4423 ``payload`` represents the raw frame payload. If it begins with
4424 4424 ``cbor:``, the following string is evaluated as Python code and the
4425 4425 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4426 4426 as a Python byte string literal.
4427 4427 """
4428 4428 opts = pycompat.byteskwargs(opts)
4429 4429
4430 4430 if opts[b'localssh'] and not repo:
4431 4431 raise error.Abort(_(b'--localssh requires a repository'))
4432 4432
4433 4433 if opts[b'peer'] and opts[b'peer'] not in (
4434 4434 b'raw',
4435 4435 b'http2',
4436 4436 b'ssh1',
4437 4437 b'ssh2',
4438 4438 ):
4439 4439 raise error.Abort(
4440 4440 _(b'invalid value for --peer'),
4441 4441 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4442 4442 )
4443 4443
4444 4444 if path and opts[b'localssh']:
4445 4445 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446 4446
4447 4447 if ui.interactive():
4448 4448 ui.write(_(b'(waiting for commands on stdin)\n'))
4449 4449
4450 4450 blocks = list(_parsewirelangblocks(ui.fin))
4451 4451
4452 4452 proc = None
4453 4453 stdin = None
4454 4454 stdout = None
4455 4455 stderr = None
4456 4456 opener = None
4457 4457
4458 4458 if opts[b'localssh']:
4459 4459 # We start the SSH server in its own process so there is process
4460 4460 # separation. This prevents a whole class of potential bugs around
4461 4461 # shared state from interfering with server operation.
4462 4462 args = procutil.hgcmd() + [
4463 4463 b'-R',
4464 4464 repo.root,
4465 4465 b'debugserve',
4466 4466 b'--sshstdio',
4467 4467 ]
4468 4468 proc = subprocess.Popen(
4469 4469 pycompat.rapply(procutil.tonativestr, args),
4470 4470 stdin=subprocess.PIPE,
4471 4471 stdout=subprocess.PIPE,
4472 4472 stderr=subprocess.PIPE,
4473 4473 bufsize=0,
4474 4474 )
4475 4475
4476 4476 stdin = proc.stdin
4477 4477 stdout = proc.stdout
4478 4478 stderr = proc.stderr
4479 4479
4480 4480 # We turn the pipes into observers so we can log I/O.
4481 4481 if ui.verbose or opts[b'peer'] == b'raw':
4482 4482 stdin = util.makeloggingfileobject(
4483 4483 ui, proc.stdin, b'i', logdata=True
4484 4484 )
4485 4485 stdout = util.makeloggingfileobject(
4486 4486 ui, proc.stdout, b'o', logdata=True
4487 4487 )
4488 4488 stderr = util.makeloggingfileobject(
4489 4489 ui, proc.stderr, b'e', logdata=True
4490 4490 )
4491 4491
4492 4492 # --localssh also implies the peer connection settings.
4493 4493
4494 4494 url = b'ssh://localserver'
4495 4495 autoreadstderr = not opts[b'noreadstderr']
4496 4496
4497 4497 if opts[b'peer'] == b'ssh1':
4498 4498 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 4499 peer = sshpeer.sshv1peer(
4500 4500 ui,
4501 4501 url,
4502 4502 proc,
4503 4503 stdin,
4504 4504 stdout,
4505 4505 stderr,
4506 4506 None,
4507 4507 autoreadstderr=autoreadstderr,
4508 4508 )
4509 4509 elif opts[b'peer'] == b'ssh2':
4510 4510 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4511 4511 peer = sshpeer.sshv2peer(
4512 4512 ui,
4513 4513 url,
4514 4514 proc,
4515 4515 stdin,
4516 4516 stdout,
4517 4517 stderr,
4518 4518 None,
4519 4519 autoreadstderr=autoreadstderr,
4520 4520 )
4521 4521 elif opts[b'peer'] == b'raw':
4522 4522 ui.write(_(b'using raw connection to peer\n'))
4523 4523 peer = None
4524 4524 else:
4525 4525 ui.write(_(b'creating ssh peer from handshake results\n'))
4526 4526 peer = sshpeer.makepeer(
4527 4527 ui,
4528 4528 url,
4529 4529 proc,
4530 4530 stdin,
4531 4531 stdout,
4532 4532 stderr,
4533 4533 autoreadstderr=autoreadstderr,
4534 4534 )
4535 4535
4536 4536 elif path:
4537 4537 # We bypass hg.peer() so we can proxy the sockets.
4538 4538 # TODO consider not doing this because we skip
4539 4539 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4540 4540 u = urlutil.url(path)
4541 4541 if u.scheme != b'http':
4542 4542 raise error.Abort(_(b'only http:// paths are currently supported'))
4543 4543
4544 4544 url, authinfo = u.authinfo()
4545 4545 openerargs = {
4546 4546 'useragent': b'Mercurial debugwireproto',
4547 4547 }
4548 4548
4549 4549 # Turn pipes/sockets into observers so we can log I/O.
4550 4550 if ui.verbose:
4551 4551 openerargs.update(
4552 4552 {
4553 4553 'loggingfh': ui,
4554 4554 'loggingname': b's',
4555 4555 'loggingopts': {
4556 4556 'logdata': True,
4557 4557 'logdataapis': False,
4558 4558 },
4559 4559 }
4560 4560 )
4561 4561
4562 4562 if ui.debugflag:
4563 4563 openerargs['loggingopts']['logdataapis'] = True
4564 4564
4565 4565 # Don't send default headers when in raw mode. This allows us to
4566 4566 # bypass most of the behavior of our URL handling code so we can
4567 4567 # have near complete control over what's sent on the wire.
4568 4568 if opts[b'peer'] == b'raw':
4569 4569 openerargs['sendaccept'] = False
4570 4570
4571 4571 opener = urlmod.opener(ui, authinfo, **openerargs)
4572 4572
4573 4573 if opts[b'peer'] == b'http2':
4574 4574 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4575 4575 # We go through makepeer() because we need an API descriptor for
4576 4576 # the peer instance to be useful.
4577 4577 maybe_silent = (
4578 4578 ui.silent()
4579 4579 if opts[b'nologhandshake']
4580 4580 else util.nullcontextmanager()
4581 4581 )
4582 4582 with maybe_silent, ui.configoverride(
4583 4583 {(b'experimental', b'httppeer.advertise-v2'): True}
4584 4584 ):
4585 4585 peer = httppeer.makepeer(ui, path, opener=opener)
4586 4586
4587 4587 if not isinstance(peer, httppeer.httpv2peer):
4588 4588 raise error.Abort(
4589 4589 _(
4590 4590 b'could not instantiate HTTP peer for '
4591 4591 b'wire protocol version 2'
4592 4592 ),
4593 4593 hint=_(
4594 4594 b'the server may not have the feature '
4595 4595 b'enabled or is not allowing this '
4596 4596 b'client version'
4597 4597 ),
4598 4598 )
4599 4599
4600 4600 elif opts[b'peer'] == b'raw':
4601 4601 ui.write(_(b'using raw connection to peer\n'))
4602 4602 peer = None
4603 4603 elif opts[b'peer']:
4604 4604 raise error.Abort(
4605 4605 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4606 4606 )
4607 4607 else:
4608 4608 peer = httppeer.makepeer(ui, path, opener=opener)
4609 4609
4610 4610 # We /could/ populate stdin/stdout with sock.makefile()...
4611 4611 else:
4612 4612 raise error.Abort(_(b'unsupported connection configuration'))
4613 4613
4614 4614 batchedcommands = None
4615 4615
4616 4616 # Now perform actions based on the parsed wire language instructions.
4617 4617 for action, lines in blocks:
4618 4618 if action in (b'raw', b'raw+'):
4619 4619 if not stdin:
4620 4620 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4621 4621
4622 4622 # Concatenate the data together.
4623 4623 data = b''.join(l.lstrip() for l in lines)
4624 4624 data = stringutil.unescapestr(data)
4625 4625 stdin.write(data)
4626 4626
4627 4627 if action == b'raw+':
4628 4628 stdin.flush()
4629 4629 elif action == b'flush':
4630 4630 if not stdin:
4631 4631 raise error.Abort(_(b'cannot call flush on this peer'))
4632 4632 stdin.flush()
4633 4633 elif action.startswith(b'command'):
4634 4634 if not peer:
4635 4635 raise error.Abort(
4636 4636 _(
4637 4637 b'cannot send commands unless peer instance '
4638 4638 b'is available'
4639 4639 )
4640 4640 )
4641 4641
4642 4642 command = action.split(b' ', 1)[1]
4643 4643
4644 4644 args = {}
4645 4645 for line in lines:
4646 4646 # We need to allow empty values.
4647 4647 fields = line.lstrip().split(b' ', 1)
4648 4648 if len(fields) == 1:
4649 4649 key = fields[0]
4650 4650 value = b''
4651 4651 else:
4652 4652 key, value = fields
4653 4653
4654 4654 if value.startswith(b'eval:'):
4655 4655 value = stringutil.evalpythonliteral(value[5:])
4656 4656 else:
4657 4657 value = stringutil.unescapestr(value)
4658 4658
4659 4659 args[key] = value
4660 4660
4661 4661 if batchedcommands is not None:
4662 4662 batchedcommands.append((command, args))
4663 4663 continue
4664 4664
4665 4665 ui.status(_(b'sending %s command\n') % command)
4666 4666
4667 4667 if b'PUSHFILE' in args:
4668 4668 with open(args[b'PUSHFILE'], 'rb') as fh:
4669 4669 del args[b'PUSHFILE']
4670 4670 res, output = peer._callpush(
4671 4671 command, fh, **pycompat.strkwargs(args)
4672 4672 )
4673 4673 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4674 4674 ui.status(
4675 4675 _(b'remote output: %s\n') % stringutil.escapestr(output)
4676 4676 )
4677 4677 else:
4678 4678 with peer.commandexecutor() as e:
4679 4679 res = e.callcommand(command, args).result()
4680 4680
4681 4681 if isinstance(res, wireprotov2peer.commandresponse):
4682 4682 val = res.objects()
4683 4683 ui.status(
4684 4684 _(b'response: %s\n')
4685 4685 % stringutil.pprint(val, bprefix=True, indent=2)
4686 4686 )
4687 4687 else:
4688 4688 ui.status(
4689 4689 _(b'response: %s\n')
4690 4690 % stringutil.pprint(res, bprefix=True, indent=2)
4691 4691 )
4692 4692
4693 4693 elif action == b'batchbegin':
4694 4694 if batchedcommands is not None:
4695 4695 raise error.Abort(_(b'nested batchbegin not allowed'))
4696 4696
4697 4697 batchedcommands = []
4698 4698 elif action == b'batchsubmit':
4699 4699 # There is a batching API we could go through. But it would be
4700 4700 # difficult to normalize requests into function calls. It is easier
4701 4701 # to bypass this layer and normalize to commands + args.
4702 4702 ui.status(
4703 4703 _(b'sending batch with %d sub-commands\n')
4704 4704 % len(batchedcommands)
4705 4705 )
4706 4706 assert peer is not None
4707 4707 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4708 4708 ui.status(
4709 4709 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4710 4710 )
4711 4711
4712 4712 batchedcommands = None
4713 4713
4714 4714 elif action.startswith(b'httprequest '):
4715 4715 if not opener:
4716 4716 raise error.Abort(
4717 4717 _(b'cannot use httprequest without an HTTP peer')
4718 4718 )
4719 4719
4720 4720 request = action.split(b' ', 2)
4721 4721 if len(request) != 3:
4722 4722 raise error.Abort(
4723 4723 _(
4724 4724 b'invalid httprequest: expected format is '
4725 4725 b'"httprequest <method> <path>'
4726 4726 )
4727 4727 )
4728 4728
4729 4729 method, httppath = request[1:]
4730 4730 headers = {}
4731 4731 body = None
4732 4732 frames = []
4733 4733 for line in lines:
4734 4734 line = line.lstrip()
4735 4735 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4736 4736 if m:
4737 4737 # Headers need to use native strings.
4738 4738 key = pycompat.strurl(m.group(1))
4739 4739 value = pycompat.strurl(m.group(2))
4740 4740 headers[key] = value
4741 4741 continue
4742 4742
4743 4743 if line.startswith(b'BODYFILE '):
4744 4744 with open(line.split(b' ', 1), b'rb') as fh:
4745 4745 body = fh.read()
4746 4746 elif line.startswith(b'frame '):
4747 4747 frame = wireprotoframing.makeframefromhumanstring(
4748 4748 line[len(b'frame ') :]
4749 4749 )
4750 4750
4751 4751 frames.append(frame)
4752 4752 else:
4753 4753 raise error.Abort(
4754 4754 _(b'unknown argument to httprequest: %s') % line
4755 4755 )
4756 4756
4757 4757 url = path + httppath
4758 4758
4759 4759 if frames:
4760 4760 body = b''.join(bytes(f) for f in frames)
4761 4761
4762 4762 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4763 4763
4764 4764 # urllib.Request insists on using has_data() as a proxy for
4765 4765 # determining the request method. Override that to use our
4766 4766 # explicitly requested method.
4767 4767 req.get_method = lambda: pycompat.sysstr(method)
4768 4768
4769 4769 try:
4770 4770 res = opener.open(req)
4771 4771 body = res.read()
4772 4772 except util.urlerr.urlerror as e:
4773 4773 # read() method must be called, but only exists in Python 2
4774 4774 getattr(e, 'read', lambda: None)()
4775 4775 continue
4776 4776
4777 4777 ct = res.headers.get('Content-Type')
4778 4778 if ct == 'application/mercurial-cbor':
4779 4779 ui.write(
4780 4780 _(b'cbor> %s\n')
4781 4781 % stringutil.pprint(
4782 4782 cborutil.decodeall(body), bprefix=True, indent=2
4783 4783 )
4784 4784 )
4785 4785
4786 4786 elif action == b'close':
4787 4787 assert peer is not None
4788 4788 peer.close()
4789 4789 elif action == b'readavailable':
4790 4790 if not stdout or not stderr:
4791 4791 raise error.Abort(
4792 4792 _(b'readavailable not available on this peer')
4793 4793 )
4794 4794
4795 4795 stdin.close()
4796 4796 stdout.read()
4797 4797 stderr.read()
4798 4798
4799 4799 elif action == b'readline':
4800 4800 if not stdout:
4801 4801 raise error.Abort(_(b'readline not available on this peer'))
4802 4802 stdout.readline()
4803 4803 elif action == b'ereadline':
4804 4804 if not stderr:
4805 4805 raise error.Abort(_(b'ereadline not available on this peer'))
4806 4806 stderr.readline()
4807 4807 elif action.startswith(b'read '):
4808 4808 count = int(action.split(b' ', 1)[1])
4809 4809 if not stdout:
4810 4810 raise error.Abort(_(b'read not available on this peer'))
4811 4811 stdout.read(count)
4812 4812 elif action.startswith(b'eread '):
4813 4813 count = int(action.split(b' ', 1)[1])
4814 4814 if not stderr:
4815 4815 raise error.Abort(_(b'eread not available on this peer'))
4816 4816 stderr.read(count)
4817 4817 else:
4818 4818 raise error.Abort(_(b'unknown action: %s') % action)
4819 4819
4820 4820 if batchedcommands is not None:
4821 4821 raise error.Abort(_(b'unclosed "batchbegin" request'))
4822 4822
4823 4823 if peer:
4824 4824 peer.close()
4825 4825
4826 4826 if proc:
4827 4827 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now