##// END OF EJS Templates
updatecaches: use the caches argument in `hg debugupdatecaches`...
marmoute -
r48078:e96f7585 default
parent child Browse files
Show More
@@ -1,4828 +1,4829
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 revlog,
74 74 revset,
75 75 revsetlang,
76 76 scmutil,
77 77 setdiscovery,
78 78 simplemerge,
79 79 sshpeer,
80 80 sslutil,
81 81 streamclone,
82 82 strip,
83 83 tags as tagsmod,
84 84 templater,
85 85 treediscovery,
86 86 upgrade,
87 87 url as urlmod,
88 88 util,
89 89 vfs as vfsmod,
90 90 wireprotoframing,
91 91 wireprotoserver,
92 92 wireprotov2peer,
93 93 )
94 from .interfaces import repository
94 95 from .utils import (
95 96 cborutil,
96 97 compression,
97 98 dateutil,
98 99 procutil,
99 100 stringutil,
100 101 urlutil,
101 102 )
102 103
103 104 from .revlogutils import (
104 105 deltas as deltautil,
105 106 nodemap,
106 107 sidedata,
107 108 )
108 109
# Convenience alias: releasing locks is done via lockmod.release throughout
# the debug commands below.
release = lockmod.release

# Command table for this module.  It is seeded with the commands provided by
# the strip module, then every @command decorator below registers its debug*
# command into the same table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
114 115
115 116
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repository
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
135 136
136 137
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active

    Writes the (harmless) EICAR test file into the repository cache, waits
    briefly so a scanner can react, then removes it again.
    """
    # Fix: vfs paths are bytes throughout this file (note the b'wb' mode and
    # every other path literal); the path was a native str, which is
    # inconsistent and breaks bytes-only vfs path handling on Python 3.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
152 153
153 154
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle path, let exchange sniff its format, then replay it
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
160 161
161 162
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense on a pristine repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only count 'n' events so the progress bar has a
    # total; note `type` and `id` below shadow builtins, kept as-is)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second parse pass: actually create the commits inside one transaction
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node created (-1: none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # maps DAG rev id -> commit node, for parent lookups
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one changeset; data is (id, parent ids)
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # maintain a single file "mf" whose content is merged
                    # with a real 3-way text merge on merge commits
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this revision's slot in the file so each rev
                    # produces a distinct, mergeable change
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # file "of" is fully rewritten at every revision
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # add a brand-new file "nf<id>" at every revision
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # on merges, carry over the second parent's nf*
                        # files so they are not dropped from the manifest
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # built above; None means "file absent"
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate DAG parent ids into commit nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node `id`
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the branch used for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # write accumulated tags as local tags (not version-controlled)
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
337 338
338 339
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """display the contents of the changegroup unbundler 'gen'

    With 'all' set, every delta of every section (changelog, manifest,
    filelogs) is printed with its full metadata; otherwise only the
    changelog node hashes are listed.  'indent' prefixes every line with
    that many spaces (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump the deltas of the current section; deltaiter() consumes
            # the stream up to the next section header
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections appear in stream order: changelog, manifest, filelogs
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iterate filelog headers until the empty sentinel dict
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
378 379
379 380
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'

    'part' is a bundle2 part whose payload is an encoded obsolescence
    marker stream; 'indent' prefixes every output line.
    """
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # marker format newer than this client understands: report and skip
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output matches that command
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
402 403
403 404
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one '<node> <phase>' line
    per head, each line prefixed by 'indent' spaces"""
    pad = b' ' * indent
    decoded = phases.binarydecode(data)
    for phasenum in phases.allphases:
        phasename = phases.phasenames[phasenum]
        for node in decoded[phasenum]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(node), phasename))
412 413
413 414
def _quasirepr(thing):
    """Return a repr-like bytes rendering of 'thing'.

    Mapping types are rendered with their keys sorted so the output is
    deterministic; everything else falls back to repr().
    """
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mappingtypes):
        pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
420 421
421 422
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only display parts whose type was requested via
    # --part-type (empty list means "show everything")
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # for known part types, delegate payload decoding to the matching
        # helper (suppressed in quiet mode)
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
444 445
445 446
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, do not unpack the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        unbundler = exchange.readbundle(ui, f, bundlepath)
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
468 469
469 470
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        # also decode and display the nested bundle2 capability set, if any
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for capkey, capvalues in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % capkey)
                for value in capvalues:
                    ui.write(b' %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
489 490
490 491
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)

    changes = None
    if opts['compute']:
        # recompute the file-change information from the changeset itself
        changes = metadata.compute_all_files_changes(ctx)
    else:
        # otherwise read the pre-computed sidedata block, when present
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            changes = metadata.decode_files_sidedata(sd)

    if changes is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for fname in sorted(changes.touched):
        # classify the change; categories are checked from most to least
        # specific, "touched" being the catch-all
        if fname in changes.added:
            action = b"added"
        elif fname in changes.removed:
            action = b"removed"
        elif fname in changes.merged:
            action = b"merged"
        elif fname in changes.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # note the copy source and which parent it came from, if any
        copy_parent = b""
        copy_source = b""
        if fname in changes.copied_from_p1:
            copy_parent = b"p1"
            copy_source = changes.copied_from_p1[fname]
        elif fname in changes.copied_from_p2:
            copy_parent = b"p2"
            copy_source = changes.copied_from_p2[fname]

        ui.write(template % (action, copy_parent, fname, copy_source))
540 541
541 542
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    nerrors = 0
    # pass 1: every tracked file must be consistent with the manifests
    for fname in repo.dirstate:
        state = repo.dirstate[fname]
        if state in b"nr" and fname not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (fname, state))
            nerrors += 1
        if state in b"a" and fname in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (fname, state))
            nerrors += 1
        if state in b"m" and fname not in m1 and fname not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, state)
            )
            nerrors += 1
    # pass 2: every file in the first manifest must be tracked
    for fname in m1:
        state = repo.dirstate[fname]
        if state not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s") % (fname, state)
            )
            nerrors += 1
    if nerrors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
570 571
571 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
584 585
585 586
def _debugdisplaycolor(ui):
    """print every known color/effect name, each rendered with itself"""
    # operate on a copy so the temporary style table built here does not
    # leak back into the caller's ui
    ui = ui.copy()
    ui._styles.clear()
    # map each active effect to itself so writing the name renders it
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-defined color.*/terminfo.* config
        # entries, keyed by the name with its prefix stripped
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
602 603
603 604
def _debugdisplaystyle(ui):
    """print each configured style label followed by its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest one so the effect column lines up
    colwidth = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, colwidth - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
617 618
618 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
640 641
641 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone index file given: emit its DAG directly
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) events for every revision; listed revs also
            # get an 'l' (label) event naming them rN
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no index file: walk the changelog of the current repository
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # precompute rev -> [tag names] for label events
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the branch
                    # recorded in the changeset extras changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # serialize the event stream back into dag text (inverse of parsedag)
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
711 712
712 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the single positional argument is the revision,
    # not a file name
    storage_selected = any(
        opts.get(key) for key in (b'changelog', b'manifest', b'dir')
    )
    if storage_selected:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 729
729 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # also report whether the parsed timestamp falls inside RANGE
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
748 749
749 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    # sparse-read is optional; older/simpler revlogs lack the attribute
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify the delta by what its base revision is
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either the revision itself
            # (a full snapshot) or the previous revision
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # sum the compressed sizes along the whole delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain ids are assigned in order of first appearance of each base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        # bytes in that span that belong to other chains
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        # guard the ratios against division by zero (empty revisions)
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate how the sparse reader would slice this chain into
            # disk reads, to report read efficiency
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
930 931
931 932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; either disables
    # mtime display
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    # dirstate entries iterate as (file, ent) where, judging from the usage
    # below, ent[0] is the state character, ent[1] the mode bits, ent[2] the
    # size and ent[3] the mtime — confirm against the dirstate implementation
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            # -1 mtime marks an entry whose timestamp is unknown/invalidated
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit (S_IFLNK) of the stored mode
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # finally list recorded copy/rename sources
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975 976
976 977
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing
    with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size
      is adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # Talk to a real (or configured) remote peer.
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # Simulate the remote side with a filtered view of the local repo:
        # hide everything that is not an ancestor of --remote-as-revs.
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # Likewise shrink the local side down to --local-as-revs ancestors.
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information from the discovery run itself.
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # Machine-readable output: capture whatever the discovery run
        # prints and emit it as part of the structured result instead.
        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1230 1231
1231 1232
_chunksize = 4 << 10  # 4 KiB read/write granularity used by debugdownload
1233 1234
1234 1235
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # Resolve the URL through Mercurial's own opener so configured
    # authentication/proxy handling applies.
    src = urlmod.open(ui, url, output)

    # Stream either into the requested output file or straight to the ui.
    sink = open(output, b"wb", _chunksize) if output else ui
    try:
        while True:
            chunk = src.read(_chunksize)
            if not chunk:
                break
            sink.write(chunk)
    finally:
        # Only close sinks we opened ourselves; the ui outlives this call.
        if output:
            sink.close()
1257 1258
1258 1259
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Where the extension was loaded from: its module file, or the
        # frozen executable itself under an oxidized (PyOxidizer) build.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # default verbosity: annotate the name with test status
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # flag the most recent version this extension was tested with
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1320 1321
1321 1322
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset expression goes through these transformation stages in
    # order; each may be dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # omit the '* parsed:' banner when only --verbose triggered it
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be applied to
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working-directory files (known, unknown and ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print, in sorted order, every candidate the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1417 1418
1418 1419
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name, but never narrower than the
    # 'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '<name>:' padded out to the common column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output renders booleans as yes/no; strings pass through
        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default can be
        # highlighted differently by color/templating
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1489 1490
1490 1491
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a truthy probe result as b'yes'/b'no'
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Probing case sensitivity requires creating a scratch file; if the
    # directory is not writable we simply report '(unknown)'.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1513 1514
1514 1515
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # Translate the hex node arguments into the wire-protocol call.
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    cg = peer.getbundle(b'debug', **kwargs)

    # Map the user-facing compression name onto an on-disk bundle type.
    known = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = known.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, cg, bundlepath, bundletype)
1561 1562
1562 1563
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # 'ignored' is the path (file or ancestor dir) that matched an
            # ignore rule; 'ignoredata' is the (file, line, rule) triple for it
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check whether one of its
                    # parent directories is
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1611 1612
1612 1613
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, short hashes otherwise
    shortfn = hex if ui.debugflag else short

    # Node-column width: measure the first entry; an empty store keeps the
    # short-hash default of 12.
    idlen = 12
    for first in store:
        idlen = len(shortfn(store.node(first)))
        break

    fm = ui.formatter(b'debugindex', opts)
    header = b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1652 1653
1653 1654
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per real parent; a null second parent is not drawn
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1672 1673
1673 1674
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index once so its counters are populated before sampling.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for name in sorted(stats):
        ui.write(b'%s: %d\n' % (name, stats[name]))
1683 1684
1684 1685
1685 1686 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1686 1687 def debuginstall(ui, **opts):
1687 1688 """test Mercurial installation
1688 1689
1689 1690 Returns 0 on success.
1690 1691 """
1691 1692 opts = pycompat.byteskwargs(opts)
1692 1693
1693 1694 problems = 0
1694 1695
1695 1696 fm = ui.formatter(b'debuginstall', opts)
1696 1697 fm.startitem()
1697 1698
1698 1699 # encoding might be unknown or wrong. don't translate these messages.
1699 1700 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1700 1701 err = None
1701 1702 try:
1702 1703 codecs.lookup(pycompat.sysstr(encoding.encoding))
1703 1704 except LookupError as inst:
1704 1705 err = stringutil.forcebytestr(inst)
1705 1706 problems += 1
1706 1707 fm.condwrite(
1707 1708 err,
1708 1709 b'encodingerror',
1709 1710 b" %s\n (check that your locale is properly set)\n",
1710 1711 err,
1711 1712 )
1712 1713
1713 1714 # Python
1714 1715 pythonlib = None
1715 1716 if util.safehasattr(os, '__file__'):
1716 1717 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1717 1718 elif getattr(sys, 'oxidized', False):
1718 1719 pythonlib = pycompat.sysexecutable
1719 1720
1720 1721 fm.write(
1721 1722 b'pythonexe',
1722 1723 _(b"checking Python executable (%s)\n"),
1723 1724 pycompat.sysexecutable or _(b"unknown"),
1724 1725 )
1725 1726 fm.write(
1726 1727 b'pythonimplementation',
1727 1728 _(b"checking Python implementation (%s)\n"),
1728 1729 pycompat.sysbytes(platform.python_implementation()),
1729 1730 )
1730 1731 fm.write(
1731 1732 b'pythonver',
1732 1733 _(b"checking Python version (%s)\n"),
1733 1734 (b"%d.%d.%d" % sys.version_info[:3]),
1734 1735 )
1735 1736 fm.write(
1736 1737 b'pythonlib',
1737 1738 _(b"checking Python lib (%s)...\n"),
1738 1739 pythonlib or _(b"unknown"),
1739 1740 )
1740 1741
1741 1742 try:
1742 1743 from . import rustext # pytype: disable=import-error
1743 1744
1744 1745 rustext.__doc__ # trigger lazy import
1745 1746 except ImportError:
1746 1747 rustext = None
1747 1748
1748 1749 security = set(sslutil.supportedprotocols)
1749 1750 if sslutil.hassni:
1750 1751 security.add(b'sni')
1751 1752
1752 1753 fm.write(
1753 1754 b'pythonsecurity',
1754 1755 _(b"checking Python security support (%s)\n"),
1755 1756 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1756 1757 )
1757 1758
1758 1759 # These are warnings, not errors. So don't increment problem count. This
1759 1760 # may change in the future.
1760 1761 if b'tls1.2' not in security:
1761 1762 fm.plain(
1762 1763 _(
1763 1764 b' TLS 1.2 not supported by Python install; '
1764 1765 b'network connections lack modern security\n'
1765 1766 )
1766 1767 )
1767 1768 if b'sni' not in security:
1768 1769 fm.plain(
1769 1770 _(
1770 1771 b' SNI not supported by Python install; may have '
1771 1772 b'connectivity issues with some servers\n'
1772 1773 )
1773 1774 )
1774 1775
1775 1776 fm.plain(
1776 1777 _(
1777 1778 b"checking Rust extensions (%s)\n"
1778 1779 % (b'missing' if rustext is None else b'installed')
1779 1780 ),
1780 1781 )
1781 1782
1782 1783 # TODO print CA cert info
1783 1784
1784 1785 # hg version
1785 1786 hgver = util.version()
1786 1787 fm.write(
1787 1788 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1788 1789 )
1789 1790 fm.write(
1790 1791 b'hgverextra',
1791 1792 _(b"checking Mercurial custom build (%s)\n"),
1792 1793 b'+'.join(hgver.split(b'+')[1:]),
1793 1794 )
1794 1795
1795 1796 # compiled modules
1796 1797 hgmodules = None
1797 1798 if util.safehasattr(sys.modules[__name__], '__file__'):
1798 1799 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1799 1800 elif getattr(sys, 'oxidized', False):
1800 1801 hgmodules = pycompat.sysexecutable
1801 1802
1802 1803 fm.write(
1803 1804 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1804 1805 )
1805 1806 fm.write(
1806 1807 b'hgmodules',
1807 1808 _(b"checking installed modules (%s)...\n"),
1808 1809 hgmodules or _(b"unknown"),
1809 1810 )
1810 1811
1811 1812 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1812 1813 rustext = rustandc # for now, that's the only case
1813 1814 cext = policy.policy in (b'c', b'allow') or rustandc
1814 1815 nopure = cext or rustext
1815 1816 if nopure:
1816 1817 err = None
1817 1818 try:
1818 1819 if cext:
1819 1820 from .cext import ( # pytype: disable=import-error
1820 1821 base85,
1821 1822 bdiff,
1822 1823 mpatch,
1823 1824 osutil,
1824 1825 )
1825 1826
1826 1827 # quiet pyflakes
1827 1828 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1828 1829 if rustext:
1829 1830 from .rustext import ( # pytype: disable=import-error
1830 1831 ancestor,
1831 1832 dirstate,
1832 1833 )
1833 1834
1834 1835 dir(ancestor), dir(dirstate) # quiet pyflakes
1835 1836 except Exception as inst:
1836 1837 err = stringutil.forcebytestr(inst)
1837 1838 problems += 1
1838 1839 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1839 1840
1840 1841 compengines = util.compengines._engines.values()
1841 1842 fm.write(
1842 1843 b'compengines',
1843 1844 _(b'checking registered compression engines (%s)\n'),
1844 1845 fm.formatlist(
1845 1846 sorted(e.name() for e in compengines),
1846 1847 name=b'compengine',
1847 1848 fmt=b'%s',
1848 1849 sep=b', ',
1849 1850 ),
1850 1851 )
1851 1852 fm.write(
1852 1853 b'compenginesavail',
1853 1854 _(b'checking available compression engines (%s)\n'),
1854 1855 fm.formatlist(
1855 1856 sorted(e.name() for e in compengines if e.available()),
1856 1857 name=b'compengine',
1857 1858 fmt=b'%s',
1858 1859 sep=b', ',
1859 1860 ),
1860 1861 )
1861 1862 wirecompengines = compression.compengines.supportedwireengines(
1862 1863 compression.SERVERROLE
1863 1864 )
1864 1865 fm.write(
1865 1866 b'compenginesserver',
1866 1867 _(
1867 1868 b'checking available compression engines '
1868 1869 b'for wire protocol (%s)\n'
1869 1870 ),
1870 1871 fm.formatlist(
1871 1872 [e.name() for e in wirecompengines if e.wireprotosupport()],
1872 1873 name=b'compengine',
1873 1874 fmt=b'%s',
1874 1875 sep=b', ',
1875 1876 ),
1876 1877 )
1877 1878 re2 = b'missing'
1878 1879 if util._re2:
1879 1880 re2 = b'available'
1880 1881 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1881 1882 fm.data(re2=bool(util._re2))
1882 1883
1883 1884 # templates
1884 1885 p = templater.templatedir()
1885 1886 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1886 1887 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1887 1888 if p:
1888 1889 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1889 1890 if m:
1890 1891 # template found, check if it is working
1891 1892 err = None
1892 1893 try:
1893 1894 templater.templater.frommapfile(m)
1894 1895 except Exception as inst:
1895 1896 err = stringutil.forcebytestr(inst)
1896 1897 p = None
1897 1898 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1898 1899 else:
1899 1900 p = None
1900 1901 fm.condwrite(
1901 1902 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1902 1903 )
1903 1904 fm.condwrite(
1904 1905 not m,
1905 1906 b'defaulttemplatenotfound',
1906 1907 _(b" template '%s' not found\n"),
1907 1908 b"default",
1908 1909 )
1909 1910 if not p:
1910 1911 problems += 1
1911 1912 fm.condwrite(
1912 1913 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1913 1914 )
1914 1915
1915 1916 # editor
1916 1917 editor = ui.geteditor()
1917 1918 editor = util.expandpath(editor)
1918 1919 editorbin = procutil.shellsplit(editor)[0]
1919 1920 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1920 1921 cmdpath = procutil.findexe(editorbin)
1921 1922 fm.condwrite(
1922 1923 not cmdpath and editor == b'vi',
1923 1924 b'vinotfound',
1924 1925 _(
1925 1926 b" No commit editor set and can't find %s in PATH\n"
1926 1927 b" (specify a commit editor in your configuration"
1927 1928 b" file)\n"
1928 1929 ),
1929 1930 not cmdpath and editor == b'vi' and editorbin,
1930 1931 )
1931 1932 fm.condwrite(
1932 1933 not cmdpath and editor != b'vi',
1933 1934 b'editornotfound',
1934 1935 _(
1935 1936 b" Can't find editor '%s' in PATH\n"
1936 1937 b" (specify a commit editor in your configuration"
1937 1938 b" file)\n"
1938 1939 ),
1939 1940 not cmdpath and editorbin,
1940 1941 )
1941 1942 if not cmdpath and editor != b'vi':
1942 1943 problems += 1
1943 1944
1944 1945 # check username
1945 1946 username = None
1946 1947 err = None
1947 1948 try:
1948 1949 username = ui.username()
1949 1950 except error.Abort as e:
1950 1951 err = e.message
1951 1952 problems += 1
1952 1953
1953 1954 fm.condwrite(
1954 1955 username, b'username', _(b"checking username (%s)\n"), username
1955 1956 )
1956 1957 fm.condwrite(
1957 1958 err,
1958 1959 b'usernameerror',
1959 1960 _(
1960 1961 b"checking username...\n %s\n"
1961 1962 b" (specify a username in your configuration file)\n"
1962 1963 ),
1963 1964 err,
1964 1965 )
1965 1966
1966 1967 for name, mod in extensions.extensions():
1967 1968 handler = getattr(mod, 'debuginstall', None)
1968 1969 if handler is not None:
1969 1970 problems += handler(ui, fm)
1970 1971
1971 1972 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1972 1973 if not problems:
1973 1974 fm.data(problems=problems)
1974 1975 fm.condwrite(
1975 1976 problems,
1976 1977 b'problems',
1977 1978 _(b"%d problems detected, please check your install!\n"),
1978 1979 problems,
1979 1980 )
1980 1981 fm.end()
1981 1982
1982 1983 return problems
1983 1984
1984 1985
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Close the peer in all cases (including the unsupported-capability
    # abort), consistent with debugpeer/debugpushkey which also wrap peer
    # usage in try/finally.
    try:
        if not repo.capable(b'known'):
            raise error.Abort(b"known() not supported by target repository")
        flags = repo.known([bin(s) for s in ids])
        ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
    finally:
        repo.close()
1998 1999
1999 2000
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so that ancient shell-completion scripts keep working;
    # simply forward to the modern name-completion command.
    debugnamecomplete(ui, repo, *args)
2004 2005
2005 2006
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) without
    # checking ownership or staleness; this is the DANGEROUS path.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire the requested lock(s) non-blockingly
    # and hold them until the user answers the prompt.  The wlock is taken
    # before the store lock here -- NOTE(review): presumably this matches
    # Mercurial's required lock-acquisition order; confirm against
    # localrepo's locking documentation.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # Always release whatever we managed to acquire, even on abort.
        release(*locks)

    # Reporting mode (no options given): probe each lock and describe it.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Probe the lock by trying a non-blocking acquire.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We got the lock, so nobody else held it; release immediately.
            l.release()
        else:
            # Someone holds it: read the lock file to report who/where.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; shorten the message when
                    # the holder is on this very machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the probe and
                # the stat: treat as free.  Anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # Exit status is the number of held locks (0 == none held).
    return held
2117 2118
2118 2119
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache from the root manifest storage; not all
        # revlog implementations have one, hence the AttributeError guard.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing mutates on-disk state, so take the wlock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        # Populate the cache by reading each requested manifest node.
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No option given: display the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2192 2193
2193 2194
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # In verbose mode, report which on-disk merge-state format was used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: render commits, per-file state, and extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the local and other sides of the merge.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per file tracked by the merge state.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # The meaning of the remaining state fields depends on the
            # record type (content merge vs. path/rename conflict).
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level extras: only for files that were NOT already shown above.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2301 2302
2302 2303
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather candidates from every name namespace except 'branches',
    # which is handled separately below so that only open branches are
    # offered for completion.
    candidates = set()
    for namespace_name, namespace in pycompat.iteritems(repo.names):
        if namespace_name == b'branches':
            continue
        candidates.update(namespace.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # An empty argument list means "complete everything".
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        for candidate in candidates:
            if candidate.startswith(prefix):
                matches.add(candidate)

    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2325 2326
2326 2327
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Every mode works on the unfiltered repository's changelog.
    if opts['dump_new']:
        # Serialize a fresh persistent nodemap and write it to stdout.
        changelog = repo.unfiltered().changelog
        index = changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            blob = index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(index)
        ui.write(blob)
    elif opts['dump_disk']:
        # Dump the raw bytes already persisted on disk (if any).
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            ui.write(blob[:])
    elif opts['check']:
        # Validate the on-disk data against the live index.
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            return nodemap.check_data(ui, changelog.index, blob)
    elif opts['metadata']:
        # Print the docket (metadata header) of the persisted nodemap.
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_pct = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_pct)
2388 2389
2389 2390
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into binary, aborting on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and return early.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting `precursor` with the
        # given successors, inside a lock + transaction.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent recording needs the precursor to exist locally.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally filtered by --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # With both --rev and --index we must walk ALL markers so the
            # printed indices stay globally correct; see comment below.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2539 2540
2540 2541
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Print one "source -> destination" line per recorded copy vs. p1.
    copy_map = ctx.p1copies()
    for destination, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2553 2554
2554 2555
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Renamed from debugp1copies: the original def reused the name of the
    # p1 variant defined just above, silently shadowing it at module level.
    # The CLI is unaffected (the @command decorator registers by the byte
    # string b'debugp2copies'), but the module attribute now resolves
    # correctly for both commands.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2567 2568
2568 2569
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state character is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative with '/' separators for matching.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            # st[0] is the dirstate state character ('n', 'm', 'a', 'r').
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate at the next path separator so we
                # only complete one segment at a time.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the options;
    # default to all of them when no filter option is given.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2637 2638
2638 2639
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print each copy as "source -> dest",
    # sorted by destination path for stable output.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2652 2653
2653 2654
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        is_pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        if is_local:
            ui.write(_(b'local: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'local: %s\n') % _(b'no'))
        if is_pushable:
            ui.write(_(b'pushable: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'pushable: %s\n') % _(b'no'))
    finally:
        peer.close()
2677 2678
2678 2679
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool maps to the ui.forcemerge override, the same mechanism
        # the real merge machinery uses.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is set, suppress the chatter _picktool
                # produces while matching merge-patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2766 2767
2767 2768
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            listing = target.listkeys(namespace)
            for key, value in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # Update mode: compare-and-set KEY from OLD to NEW.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Exit status 0 on success (truthy result), 1 otherwise.
            return not result
    finally:
        target.close()
2803 2804
2804 2805
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) encodings of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation between the two vectors using pvec's
    # comparison operators.
    # NOTE(review): if none of these four comparisons holds, `rel` is
    # never bound and the final write raises NameError -- presumably the
    # pvec relations (=, >, <, |) are exhaustive; confirm against the
    # pvec module.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2831 2832
2832 2833
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None tells rebuild() to reset every file.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files only in the manifest or only in the dirstate are the
            # inconsistent ones; 'a' (added) entries are deliberately kept.
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2880 2881
2881 2882
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file

    Delegates the actual work to :mod:`repair`.
    """
    repair.rebuildfncache(ui, repo)
2886 2887
2887 2888
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information

    For each matched file, reports the copy/rename source recorded in
    its filelog, or states that it was not renamed.
    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % display)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (display, renamed[0], hex(renamed[1]))
            )
2907 2908
2908 2909
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted for stable output
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2914 2915
2915 2916
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, emit one raw line of index data per revision and
    return.  Otherwise aggregate statistics over the whole revlog:
    revision counts by delta kind (full snapshot, intermediate snapshot,
    delta against prev/p1/p2/other), size summaries, chunk compression
    types and delta-chain length/span metrics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Raw dump mode: one line per revision, no aggregation.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # cumulative raw (uncompressed) size seen so far
        heads = set()  # current DAG heads among revisions seen so far

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the rev is its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision replaces its parents in the head set
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # running ratio: cumulative raw size over the compressed
                # data consumed up to this revision's end offset
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each "size" list below is a
    # [min, max, total] accumulator maintained by addsize()
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            # raw sizes are only aggregated for non-v0 revlogs
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # no delta parent: full snapshot (or an empty revision)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta-based revision: extend the delta parent's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # classify the delta base: prev, p1, p2 or other
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # the chunk's first byte identifies its encoding
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the "total" slots into averages for display
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string templates; column width is derived from the largest value
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal column wide enough for `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # decimal column plus a "(xx.xx%)" percentage suffix
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) tuple matching pcfmtstr
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a chunk-type byte
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                # depth 0 snapshots are the full revisions reported above
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3270 3271
3271 3272
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two layouts are supported: format 0 (offset/length/linkrev plus
    parent node ids) and format 1 (flags/sizes plus parent revision
    numbers).  --verbose adds extra storage columns to either layout,
    and --debug prints full hex node ids instead of short ones.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hex node ids with --debug, short forms otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # derive the node-id column width from the first entry
        idlen = len(shortfn(r.node(i)))
        break

    # column headers, matching the row format strings used below
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be tolerant of lookup failures: fall back to null parents
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3385 3386
3386 3387
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Parsing pipeline: each stage transforms the tree produced by the
    # previous one; the stage name is used for -p/--show-stage selection.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    showalways = set()   # stages printed unconditionally
    showchanged = set()  # stages printed only when the tree changed
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and compare the
        # resulting revision sequences element by element.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # results differ: print a unified-diff-like comparison and fail
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3518 3519
3519 3520
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Unbuffered binary mode: line buffering of binary streams is
        # unsupported (and warns on Python 3.8+), and this code is not
        # performance critical anyway.
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # pipes are not seekable, so append mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    wireprotoserver.sshserver(ui, repo, logfh=logfh).serve_forever()
3568 3569
3569 3570
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used.
    Using it will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command only updates the p1 and p2 fields in the dirstate, without
    touching anything else. This is useful for writing repository conversion
    tools, but it should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people who deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So, one last time: DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions first; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3597 3598
3598 3599
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is selected by the flag, so the sole
    # positional argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # fix: report errors under this command's own name, not the
            # "debugdata" command this code was copied from
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap to the underlying revlog when the storage object wraps one
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # deterministic output: sort entries by key
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3625 3626
3626 3627
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # derive (host, port), defaulting the port from the scheme
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # NOTE(review): ssl.wrap_socket() is deprecated and removed in
    # Python 3.12; a migration to ssl.SSLContext.wrap_socket() will be
    # needed eventually.  Verification is disabled (CERT_NONE) because
    # only the peer's raw certificate is needed for the chain check below.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)  # DER-encoded peer certificate

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first pass: check the chain without modifying the cert store
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # second pass: let Windows fetch any missing intermediates
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3698 3699
3699 3700
3700 3701 @command(
3701 3702 b"debugbackupbundle",
3702 3703 [
3703 3704 (
3704 3705 b"",
3705 3706 b"recover",
3706 3707 b"",
3707 3708 b"brings the specified changeset back into the repository",
3708 3709 )
3709 3710 ]
3710 3711 + cmdutil.logopts,
3711 3712 _(b"hg debugbackupbundle [--recover HASH]"),
3712 3713 )
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle file, then sort newest first so the
    # most recent strips are inspected (and displayed) before older ones.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # These keys are consumed by bundlerepo.getremotechanges() below; an empty
    # bundle path means "use a temporary file".
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from ``chlist``, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already in the repo.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle may reference a parent that is missing locally
            # (e.g. also stripped); warn and try the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Temporarily silence ui while building the bundle repository;
        # always restore the previous quiet state.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover mode: unbundle the first backup that contains
                # the requested node, inside a transaction.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the bundle's mtime, then either
                # the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Dispose of the temporary bundle repository in every case.
            cleanupfn()
3843 3844
3844 3845
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state of the requested revision: one stanza per
    # subrepo path, with its source URL and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3856 3857
3857 3858
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily so the module has no cost unless the command runs.
    import code

    # Seed the interactive namespace with the objects a debugger wants.
    local_ns = {'ui': ui, 'repo': repo}
    code.interact(local=local_ns)
3873 3874
3874 3875
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Rendering helpers: a changeset prints as bytes(ctx), nodes as short
    # hashes.
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # An empty set (pruned changeset) still produces a bare newline.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3930 3931
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    # Walk every revision and report what the fnodes cache holds for it,
    # without computing missing entries (computemissing=False).
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        cnode = repo[rev].node()
        fnode = fnodescache.getfnode(cnode, computemissing=False)
        if fnode is None:
            shown = b'missing'
        elif not fnode:
            # falsy but non-None cache entry: report it as invalid
            shown = b'invalid'
        else:
            shown = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                shown += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(cnode), shown))
3949 3950
3950 3951
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve the revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Turn the -D KEY=VALUE definitions into extra template properties.
    props = {}
    for definition in opts['define']:
        try:
            key, val = [part.strip() for part in definition.split(b'=', 1)]
            # Reject an empty name and the reserved 'ui' resource the same
            # way as a missing '=' (ValueError is caught below).
            if not key or key == b'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        # Print the parse tree, and the alias-expanded tree when the
        # expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
4014 4015
4015 4016
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # ui.getpass() can return None; substitute a marker so the %s
    # formatting below always receives bytes.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4030 4031
4031 4032
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo the user's answer so prompt handling can be inspected in tests.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4044 4045
4045 4046
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks so every cache category
    # can be regenerated safely; CACHES_ALL requests all of them.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4052 4053
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the work is delegated to the upgrade module; the repeatable
    # --optimize values are normalized into a set of names here.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4102 4103
4103 4104
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Path formatter: identity by default, but normalize separators when
    # ui.slash is set on platforms whose native separator is not '/'.
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Size the columns to the longest repo-relative and cwd-relative paths
    # so all rows line up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        # The third column flags exact (non-pattern) matches.
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4130 4131
4131 4132
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergent nodes are reported, prefix the reason with the
        # list of those nodes and their phases.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4149 4150
4150 4151
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the peer's debugwireargs wire command with arbitrary
    # positional and keyword arguments.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote-connection options only configure hg.peer(); they are
        # not forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only the options the user actually supplied.
        args = {k: v for k, v in pycompat.iteritems(opts) if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4181 4182
4182 4183
4183 4184 def _parsewirelangblocks(fh):
4184 4185 activeaction = None
4185 4186 blocklines = []
4186 4187 lastindent = 0
4187 4188
4188 4189 for line in fh:
4189 4190 line = line.rstrip()
4190 4191 if not line:
4191 4192 continue
4192 4193
4193 4194 if line.startswith(b'#'):
4194 4195 continue
4195 4196
4196 4197 if not line.startswith(b' '):
4197 4198 # New block. Flush previous one.
4198 4199 if activeaction:
4199 4200 yield activeaction, blocklines
4200 4201
4201 4202 activeaction = line
4202 4203 blocklines = []
4203 4204 lastindent = 0
4204 4205 continue
4205 4206
4206 4207 # Else we start with an indent.
4207 4208
4208 4209 if not activeaction:
4209 4210 raise error.Abort(_(b'indented line outside of block'))
4210 4211
4211 4212 indent = len(line) - len(line.lstrip())
4212 4213
4213 4214 # If this line is indented more than the last line, concatenate it.
4214 4215 if indent > lastindent and blocklines:
4215 4216 blocklines[-1] += line.lstrip()
4216 4217 else:
4217 4218 blocklines.append(line)
4218 4219 lastindent = indent
4219 4220
4220 4221 # Flush last block.
4221 4222 if activeaction:
4222 4223 yield activeaction, blocklines
4223 4224
4224 4225
4225 4226 @command(
4226 4227 b'debugwireproto',
4227 4228 [
4228 4229 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4229 4230 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4230 4231 (
4231 4232 b'',
4232 4233 b'noreadstderr',
4233 4234 False,
4234 4235 _(b'do not read from stderr of the remote'),
4235 4236 ),
4236 4237 (
4237 4238 b'',
4238 4239 b'nologhandshake',
4239 4240 False,
4240 4241 _(b'do not log I/O related to the peer handshake'),
4241 4242 ),
4242 4243 ]
4243 4244 + cmdutil.remoteopts,
4244 4245 _(b'[PATH]'),
4245 4246 optionalrepo=True,
4246 4247 )
4247 4248 def debugwireproto(ui, repo, path=None, **opts):
4248 4249 """send wire protocol commands to a server
4249 4250
4250 4251 This command can be used to issue wire protocol commands to remote
4251 4252 peers and to debug the raw data being exchanged.
4252 4253
4253 4254 ``--localssh`` will start an SSH server against the current repository
4254 4255 and connect to that. By default, the connection will perform a handshake
4255 4256 and establish an appropriate peer instance.
4256 4257
4257 4258 ``--peer`` can be used to bypass the handshake protocol and construct a
4258 4259 peer instance using the specified class type. Valid values are ``raw``,
4259 4260 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4260 4261 raw data payloads and don't support higher-level command actions.
4261 4262
4262 4263 ``--noreadstderr`` can be used to disable automatic reading from stderr
4263 4264 of the peer (for SSH connections only). Disabling automatic reading of
4264 4265 stderr is useful for making output more deterministic.
4265 4266
4266 4267 Commands are issued via a mini language which is specified via stdin.
4267 4268 The language consists of individual actions to perform. An action is
4268 4269 defined by a block. A block is defined as a line with no leading
4269 4270 space followed by 0 or more lines with leading space. Blocks are
4270 4271 effectively a high-level command with additional metadata.
4271 4272
4272 4273 Lines beginning with ``#`` are ignored.
4273 4274
4274 4275 The following sections denote available actions.
4275 4276
4276 4277 raw
4277 4278 ---
4278 4279
4279 4280 Send raw data to the server.
4280 4281
4281 4282 The block payload contains the raw data to send as one atomic send
4282 4283 operation. The data may not actually be delivered in a single system
4283 4284 call: it depends on the abilities of the transport being used.
4284 4285
4285 4286 Each line in the block is de-indented and concatenated. Then, that
4286 4287 value is evaluated as a Python b'' literal. This allows the use of
4287 4288 backslash escaping, etc.
4288 4289
4289 4290 raw+
4290 4291 ----
4291 4292
4292 4293 Behaves like ``raw`` except flushes output afterwards.
4293 4294
4294 4295 command <X>
4295 4296 -----------
4296 4297
4297 4298 Send a request to run a named command, whose name follows the ``command``
4298 4299 string.
4299 4300
4300 4301 Arguments to the command are defined as lines in this block. The format of
4301 4302 each line is ``<key> <value>``. e.g.::
4302 4303
4303 4304 command listkeys
4304 4305 namespace bookmarks
4305 4306
4306 4307 If the value begins with ``eval:``, it will be interpreted as a Python
4307 4308 literal expression. Otherwise values are interpreted as Python b'' literals.
4308 4309 This allows sending complex types and encoding special byte sequences via
4309 4310 backslash escaping.
4310 4311
4311 4312 The following arguments have special meaning:
4312 4313
4313 4314 ``PUSHFILE``
4314 4315 When defined, the *push* mechanism of the peer will be used instead
4315 4316 of the static request-response mechanism and the content of the
4316 4317 file specified in the value of this argument will be sent as the
4317 4318 command payload.
4318 4319
4319 4320 This can be used to submit a local bundle file to the remote.
4320 4321
4321 4322 batchbegin
4322 4323 ----------
4323 4324
4324 4325 Instruct the peer to begin a batched send.
4325 4326
4326 4327 All ``command`` blocks are queued for execution until the next
4327 4328 ``batchsubmit`` block.
4328 4329
4329 4330 batchsubmit
4330 4331 -----------
4331 4332
4332 4333 Submit previously queued ``command`` blocks as a batch request.
4333 4334
4334 4335 This action MUST be paired with a ``batchbegin`` action.
4335 4336
4336 4337 httprequest <method> <path>
4337 4338 ---------------------------
4338 4339
4339 4340 (HTTP peer only)
4340 4341
4341 4342 Send an HTTP request to the peer.
4342 4343
4343 4344 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4344 4345
4345 4346 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4346 4347 headers to add to the request. e.g. ``Accept: foo``.
4347 4348
4348 4349 The following arguments are special:
4349 4350
4350 4351 ``BODYFILE``
4351 4352 The content of the file defined as the value to this argument will be
4352 4353 transferred verbatim as the HTTP request body.
4353 4354
4354 4355 ``frame <type> <flags> <payload>``
4355 4356 Send a unified protocol frame as part of the request body.
4356 4357
4357 4358 All frames will be collected and sent as the body to the HTTP
4358 4359 request.
4359 4360
4360 4361 close
4361 4362 -----
4362 4363
4363 4364 Close the connection to the server.
4364 4365
4365 4366 flush
4366 4367 -----
4367 4368
4368 4369 Flush data written to the server.
4369 4370
4370 4371 readavailable
4371 4372 -------------
4372 4373
4373 4374 Close the write end of the connection and read all available data from
4374 4375 the server.
4375 4376
4376 4377 If the connection to the server encompasses multiple pipes, we poll both
4377 4378 pipes and read available data.
4378 4379
4379 4380 readline
4380 4381 --------
4381 4382
4382 4383 Read a line of output from the server. If there are multiple output
4383 4384 pipes, reads only the main pipe.
4384 4385
4385 4386 ereadline
4386 4387 ---------
4387 4388
4388 4389 Like ``readline``, but read from the stderr pipe, if available.
4389 4390
4390 4391 read <X>
4391 4392 --------
4392 4393
4393 4394 ``read()`` N bytes from the server's main output pipe.
4394 4395
4395 4396 eread <X>
4396 4397 ---------
4397 4398
4398 4399 ``read()`` N bytes from the server's stderr pipe, if available.
4399 4400
4400 4401 Specifying Unified Frame-Based Protocol Frames
4401 4402 ----------------------------------------------
4402 4403
4403 4404 It is possible to emit a *Unified Frame-Based Protocol* by using special
4404 4405 syntax.
4405 4406
4406 4407 A frame is composed as a type, flags, and payload. These can be parsed
4407 4408 from a string of the form:
4408 4409
4409 4410 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4410 4411
4411 4412 ``request-id`` and ``stream-id`` are integers defining the request and
4412 4413 stream identifiers.
4413 4414
4414 4415 ``type`` can be an integer value for the frame type or the string name
4415 4416 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4416 4417 ``command-name``.
4417 4418
4418 4419 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4419 4420 components. Each component (and there can be just one) can be an integer
4420 4421 or a flag name for stream flags or frame flags, respectively. Values are
4421 4422 resolved to integers and then bitwise OR'd together.
4422 4423
4423 4424 ``payload`` represents the raw frame payload. If it begins with
4424 4425 ``cbor:``, the following string is evaluated as Python code and the
4425 4426 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4426 4427 as a Python byte string literal.
4427 4428 """
4428 4429 opts = pycompat.byteskwargs(opts)
4429 4430
4430 4431 if opts[b'localssh'] and not repo:
4431 4432 raise error.Abort(_(b'--localssh requires a repository'))
4432 4433
4433 4434 if opts[b'peer'] and opts[b'peer'] not in (
4434 4435 b'raw',
4435 4436 b'http2',
4436 4437 b'ssh1',
4437 4438 b'ssh2',
4438 4439 ):
4439 4440 raise error.Abort(
4440 4441 _(b'invalid value for --peer'),
4441 4442 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4442 4443 )
4443 4444
4444 4445 if path and opts[b'localssh']:
4445 4446 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446 4447
4447 4448 if ui.interactive():
4448 4449 ui.write(_(b'(waiting for commands on stdin)\n'))
4449 4450
4450 4451 blocks = list(_parsewirelangblocks(ui.fin))
4451 4452
4452 4453 proc = None
4453 4454 stdin = None
4454 4455 stdout = None
4455 4456 stderr = None
4456 4457 opener = None
4457 4458
4458 4459 if opts[b'localssh']:
4459 4460 # We start the SSH server in its own process so there is process
4460 4461 # separation. This prevents a whole class of potential bugs around
4461 4462 # shared state from interfering with server operation.
4462 4463 args = procutil.hgcmd() + [
4463 4464 b'-R',
4464 4465 repo.root,
4465 4466 b'debugserve',
4466 4467 b'--sshstdio',
4467 4468 ]
4468 4469 proc = subprocess.Popen(
4469 4470 pycompat.rapply(procutil.tonativestr, args),
4470 4471 stdin=subprocess.PIPE,
4471 4472 stdout=subprocess.PIPE,
4472 4473 stderr=subprocess.PIPE,
4473 4474 bufsize=0,
4474 4475 )
4475 4476
4476 4477 stdin = proc.stdin
4477 4478 stdout = proc.stdout
4478 4479 stderr = proc.stderr
4479 4480
4480 4481 # We turn the pipes into observers so we can log I/O.
4481 4482 if ui.verbose or opts[b'peer'] == b'raw':
4482 4483 stdin = util.makeloggingfileobject(
4483 4484 ui, proc.stdin, b'i', logdata=True
4484 4485 )
4485 4486 stdout = util.makeloggingfileobject(
4486 4487 ui, proc.stdout, b'o', logdata=True
4487 4488 )
4488 4489 stderr = util.makeloggingfileobject(
4489 4490 ui, proc.stderr, b'e', logdata=True
4490 4491 )
4491 4492
4492 4493 # --localssh also implies the peer connection settings.
4493 4494
4494 4495 url = b'ssh://localserver'
4495 4496 autoreadstderr = not opts[b'noreadstderr']
4496 4497
4497 4498 if opts[b'peer'] == b'ssh1':
4498 4499 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 4500 peer = sshpeer.sshv1peer(
4500 4501 ui,
4501 4502 url,
4502 4503 proc,
4503 4504 stdin,
4504 4505 stdout,
4505 4506 stderr,
4506 4507 None,
4507 4508 autoreadstderr=autoreadstderr,
4508 4509 )
4509 4510 elif opts[b'peer'] == b'ssh2':
4510 4511 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4511 4512 peer = sshpeer.sshv2peer(
4512 4513 ui,
4513 4514 url,
4514 4515 proc,
4515 4516 stdin,
4516 4517 stdout,
4517 4518 stderr,
4518 4519 None,
4519 4520 autoreadstderr=autoreadstderr,
4520 4521 )
4521 4522 elif opts[b'peer'] == b'raw':
4522 4523 ui.write(_(b'using raw connection to peer\n'))
4523 4524 peer = None
4524 4525 else:
4525 4526 ui.write(_(b'creating ssh peer from handshake results\n'))
4526 4527 peer = sshpeer.makepeer(
4527 4528 ui,
4528 4529 url,
4529 4530 proc,
4530 4531 stdin,
4531 4532 stdout,
4532 4533 stderr,
4533 4534 autoreadstderr=autoreadstderr,
4534 4535 )
4535 4536
4536 4537 elif path:
4537 4538 # We bypass hg.peer() so we can proxy the sockets.
4538 4539 # TODO consider not doing this because we skip
4539 4540 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4540 4541 u = urlutil.url(path)
4541 4542 if u.scheme != b'http':
4542 4543 raise error.Abort(_(b'only http:// paths are currently supported'))
4543 4544
4544 4545 url, authinfo = u.authinfo()
4545 4546 openerargs = {
4546 4547 'useragent': b'Mercurial debugwireproto',
4547 4548 }
4548 4549
4549 4550 # Turn pipes/sockets into observers so we can log I/O.
4550 4551 if ui.verbose:
4551 4552 openerargs.update(
4552 4553 {
4553 4554 'loggingfh': ui,
4554 4555 'loggingname': b's',
4555 4556 'loggingopts': {
4556 4557 'logdata': True,
4557 4558 'logdataapis': False,
4558 4559 },
4559 4560 }
4560 4561 )
4561 4562
4562 4563 if ui.debugflag:
4563 4564 openerargs['loggingopts']['logdataapis'] = True
4564 4565
4565 4566 # Don't send default headers when in raw mode. This allows us to
4566 4567 # bypass most of the behavior of our URL handling code so we can
4567 4568 # have near complete control over what's sent on the wire.
4568 4569 if opts[b'peer'] == b'raw':
4569 4570 openerargs['sendaccept'] = False
4570 4571
4571 4572 opener = urlmod.opener(ui, authinfo, **openerargs)
4572 4573
4573 4574 if opts[b'peer'] == b'http2':
4574 4575 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4575 4576 # We go through makepeer() because we need an API descriptor for
4576 4577 # the peer instance to be useful.
4577 4578 with ui.configoverride(
4578 4579 {(b'experimental', b'httppeer.advertise-v2'): True}
4579 4580 ):
4580 4581 if opts[b'nologhandshake']:
4581 4582 ui.pushbuffer()
4582 4583
4583 4584 peer = httppeer.makepeer(ui, path, opener=opener)
4584 4585
4585 4586 if opts[b'nologhandshake']:
4586 4587 ui.popbuffer()
4587 4588
4588 4589 if not isinstance(peer, httppeer.httpv2peer):
4589 4590 raise error.Abort(
4590 4591 _(
4591 4592 b'could not instantiate HTTP peer for '
4592 4593 b'wire protocol version 2'
4593 4594 ),
4594 4595 hint=_(
4595 4596 b'the server may not have the feature '
4596 4597 b'enabled or is not allowing this '
4597 4598 b'client version'
4598 4599 ),
4599 4600 )
4600 4601
4601 4602 elif opts[b'peer'] == b'raw':
4602 4603 ui.write(_(b'using raw connection to peer\n'))
4603 4604 peer = None
4604 4605 elif opts[b'peer']:
4605 4606 raise error.Abort(
4606 4607 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4607 4608 )
4608 4609 else:
4609 4610 peer = httppeer.makepeer(ui, path, opener=opener)
4610 4611
4611 4612 # We /could/ populate stdin/stdout with sock.makefile()...
4612 4613 else:
4613 4614 raise error.Abort(_(b'unsupported connection configuration'))
4614 4615
# Accumulator for `batchbegin`/`batchsubmit`: None means "not batching",
# a list means "collect (command, args) pairs instead of sending them".
batchedcommands = None

# Now perform actions based on the parsed wire language instructions.
for action, lines in blocks:
    if action in (b'raw', b'raw+'):
        if not stdin:
            raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

        # Concatenate the data together.
        data = b''.join(l.lstrip() for l in lines)
        data = stringutil.unescapestr(data)
        stdin.write(data)

        # raw+ additionally flushes so the remote sees the bytes now.
        if action == b'raw+':
            stdin.flush()
    elif action == b'flush':
        if not stdin:
            raise error.Abort(_(b'cannot call flush on this peer'))
        stdin.flush()
    elif action.startswith(b'command'):
        if not peer:
            raise error.Abort(
                _(
                    b'cannot send commands unless peer instance '
                    b'is available'
                )
            )

        command = action.split(b' ', 1)[1]

        # Parse the indented "key value" argument lines. Values prefixed
        # with "eval:" are Python literals; everything else is an escaped
        # bytestring.
        args = {}
        for line in lines:
            # We need to allow empty values.
            fields = line.lstrip().split(b' ', 1)
            if len(fields) == 1:
                key = fields[0]
                value = b''
            else:
                key, value = fields

            if value.startswith(b'eval:'):
                value = stringutil.evalpythonliteral(value[5:])
            else:
                value = stringutil.unescapestr(value)

            args[key] = value

        # Inside a batchbegin/batchsubmit bracket, queue instead of send.
        if batchedcommands is not None:
            batchedcommands.append((command, args))
            continue

        ui.status(_(b'sending %s command\n') % command)

        if b'PUSHFILE' in args:
            # A PUSHFILE argument means the command pushes the named
            # file's contents as its data stream.
            with open(args[b'PUSHFILE'], 'rb') as fh:
                del args[b'PUSHFILE']
                res, output = peer._callpush(
                    command, fh, **pycompat.strkwargs(args)
                )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
        else:
            with peer.commandexecutor() as e:
                res = e.callcommand(command, args).result()

            if isinstance(res, wireprotov2peer.commandresponse):
                val = res.objects()
                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(val, bprefix=True, indent=2)
                )
            else:
                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

    elif action == b'batchbegin':
        if batchedcommands is not None:
            raise error.Abort(_(b'nested batchbegin not allowed'))

        batchedcommands = []
    elif action == b'batchsubmit':
        # There is a batching API we could go through. But it would be
        # difficult to normalize requests into function calls. It is easier
        # to bypass this layer and normalize to commands + args.
        ui.status(
            _(b'sending batch with %d sub-commands\n')
            % len(batchedcommands)
        )
        assert peer is not None
        for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
            ui.status(
                _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
            )

        batchedcommands = None

    elif action.startswith(b'httprequest '):
        if not opener:
            raise error.Abort(
                _(b'cannot use httprequest without an HTTP peer')
            )

        request = action.split(b' ', 2)
        if len(request) != 3:
            raise error.Abort(
                _(
                    b'invalid httprequest: expected format is '
                    b'"httprequest <method> <path>'
                )
            )

        method, httppath = request[1:]
        headers = {}
        body = None
        frames = []
        for line in lines:
            line = line.lstrip()
            m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
            if m:
                # Headers need to use native strings.
                key = pycompat.strurl(m.group(1))
                value = pycompat.strurl(m.group(2))
                headers[key] = value
                continue

            if line.startswith(b'BODYFILE '):
                # BUGFIX: split() returns a list; the filename is its
                # second element. Passing the whole list to open() raised
                # TypeError, making BODYFILE unusable.
                with open(line.split(b' ', 1)[1], b'rb') as fh:
                    body = fh.read()
            elif line.startswith(b'frame '):
                frame = wireprotoframing.makeframefromhumanstring(
                    line[len(b'frame ') :]
                )

                frames.append(frame)
            else:
                raise error.Abort(
                    _(b'unknown argument to httprequest: %s') % line
                )

        url = path + httppath

        # Explicit frames, if given, override any BODYFILE body.
        if frames:
            body = b''.join(bytes(f) for f in frames)

        req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

        # urllib.Request insists on using has_data() as a proxy for
        # determining the request method. Override that to use our
        # explicitly requested method.
        req.get_method = lambda: pycompat.sysstr(method)

        try:
            res = opener.open(req)
            body = res.read()
        except util.urlerr.urlerror as e:
            # read() method must be called, but only exists in Python 2
            getattr(e, 'read', lambda: None)()
            continue

        ct = res.headers.get('Content-Type')
        if ct == 'application/mercurial-cbor':
            ui.write(
                _(b'cbor> %s\n')
                % stringutil.pprint(
                    cborutil.decodeall(body), bprefix=True, indent=2
                )
            )

    elif action == b'close':
        assert peer is not None
        peer.close()
    elif action == b'readavailable':
        if not stdout or not stderr:
            raise error.Abort(
                _(b'readavailable not available on this peer')
            )

        # Close our end first so the read()s below can hit EOF.
        stdin.close()
        stdout.read()
        stderr.read()

    elif action == b'readline':
        if not stdout:
            raise error.Abort(_(b'readline not available on this peer'))
        stdout.readline()
    elif action == b'ereadline':
        if not stderr:
            raise error.Abort(_(b'ereadline not available on this peer'))
        stderr.readline()
    elif action.startswith(b'read '):
        count = int(action.split(b' ', 1)[1])
        if not stdout:
            raise error.Abort(_(b'read not available on this peer'))
        stdout.read(count)
    elif action.startswith(b'eread '):
        count = int(action.split(b' ', 1)[1])
        if not stderr:
            raise error.Abort(_(b'eread not available on this peer'))
        stderr.read(count)
    else:
        raise error.Abort(_(b'unknown action: %s') % action)
4820 4821
# Fail loudly if the script opened a batch bracket it never submitted.
if batchedcommands is not None:
    raise error.Abort(_(b'unclosed "batchbegin" request'))

# Release session resources: the peer connection first, then the
# server process we may have spawned for it.
if peer:
    peer.close()
if proc:
    proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now