# NOTE: web-viewer scrape residue converted to a comment so the file parses.
# Provenance of this copy: changeset r44083:1fb19665 (default branch),
# "debuginstall: gracefully handle missing __file__ attributes", Matt Harbison.
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 encoding,
48 48 error,
49 49 exchange,
50 50 extensions,
51 51 filemerge,
52 52 filesetlang,
53 53 formatter,
54 54 hg,
55 55 httppeer,
56 56 localrepo,
57 57 lock as lockmod,
58 58 logcmdutil,
59 59 merge as mergemod,
60 60 obsolete,
61 61 obsutil,
62 62 pathutil,
63 63 phases,
64 64 policy,
65 65 pvec,
66 66 pycompat,
67 67 registrar,
68 68 repair,
69 69 revlog,
70 70 revset,
71 71 revsetlang,
72 72 scmutil,
73 73 setdiscovery,
74 74 simplemerge,
75 75 sshpeer,
76 76 sslutil,
77 77 streamclone,
78 78 templater,
79 79 treediscovery,
80 80 upgrade,
81 81 url as urlmod,
82 82 util,
83 83 vfs as vfsmod,
84 84 wireprotoframing,
85 85 wireprotoserver,
86 86 wireprotov2peer,
87 87 )
88 88 from .utils import (
89 89 cborutil,
90 90 compression,
91 91 dateutil,
92 92 procutil,
93 93 stringutil,
94 94 )
95 95
96 96 from .revlogutils import deltas as deltautil
97 97
# Convenience alias: debug commands release locks via lockmod.release.
release = lockmod.release

# Command table; each @command decorator below registers into it.
command = registrar.command()
101 101
102 102
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # Explicit index file given: work outside any repository.
        index, rev1, rev2 = args
        store = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), index
        )
        lookup = store.lookup
    elif len(args) == 2:
        # Two revisions: resolve them against the current repo's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
122 122
123 123
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Use a context manager so the bundle file is always closed, even when
    # readbundle/apply raises; the original leaked the handle.  This matches
    # how debugbundle manages its hg.openpath() handle.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
130 130
131 131
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo: node ids in the DAG are assumed to
    # map 1:1 onto the revs created below, starting at rev 0.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev, so each rev owns a distinct
        # slice of lines and merges stay conflict-free
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Hold both locks and a transaction for the whole build so partial DAGs
    # are rolled back on error.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node, -1 = none yet
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        # Second parse pass: actually create the commits.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data  # ps: list of parent ids (backrefs resolved)

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file's contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this rev's own line slice so every rev changes mf
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file rewritten wholesale by every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # brand-new file per rev; on merges, carry over the
                    # second parent's nf* files so they aren't deleted
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content captured above
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]  # root commit
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),  # deterministic date: rev id as timestamp
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for an already-committed node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        # written outside the transaction: localtags is not transactional
        repo.vfs.write(b"localtags", b"".join(tags))
307 307
308 308
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump a changegroup's deltas to ui

    With ``all`` set, prints a header line plus one detailed line per delta
    for the changelog, the manifest and each filelog; otherwise prints only
    the changelog node hashes.  ``indent`` prefixes every line (used when
    nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # consumes gen.deltaiter() up to the next section boundary
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections must be consumed in stream order: changelog, manifest,
        # then one section per filelog until an empty header ({}) appears
        chunkdata = gen.changelogheader()
        showchunks(b"changelog")
        chunkdata = gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
348 348
349 349
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report, but do not abort: other bundle parts may still be usable
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so templated output matches it
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
372 372
373 373
def _debugphaseheads(ui, data, indent=0):
    """decode a binary phase-heads payload and print one head per line"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
382 382
383 383
def _quasirepr(thing):
    """repr()-like bytes rendering with deterministic dict key order"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
390 390
391 391
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known payload types get a nested, indented dump (quiet suppresses)
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
414 414
415 415
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec short-circuits: only the bundlespec string is printed
        if spec:
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
438 438
439 439
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b'  %s\n' % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for value in values:
                ui.write(b'    %s\n' % value)
456 456
457 457
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errors = 0
    # pass 1: every tracked file must be consistent with the manifests
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in b"nr" and fname not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (fname, st))
            errors += 1
        if st in b"a" and fname in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (fname, st))
            errors += 1
        if st in b"m" and fname not in m1 and fname not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, st)
            )
            errors += 1
    # pass 2: every manifest entry must be tracked with a sane state
    for fname in m1:
        st = repo.dirstate[fname]
        if st not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (fname, st))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
486 486
487 487
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
500 500
501 501
def _debugdisplaycolor(ui):
    """print every color label known to the current configuration"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # names containing '_' (e.g. '_background') sort after the rest
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
518 518
519 519
def _debugdisplaystyle(ui):
    """list each configured style label and the effects it maps to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect columns line up
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, longest - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
533 533
534 534
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        secretwarning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(secretwarning)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqstr)
556 556
557 557
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: emit that revlog's DAG, labeling listed revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            # yields ('n', (rev, parents)) nodes and ('l', (rev, label)) tags
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file: walk the repo changelog, optionally labeling tags and
        # annotating branch switches
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # extra[b'branch'] is field 5 of the changelog entry
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
627 627
628 628
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wantsinternal = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wantsinternal:
        # -c/-m/--dir select the storage; the positional arg is the rev
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
644 644
645 645
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
664 664
665 665
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    # older revlogs have no sparse-read support; default to False then
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1]=compressed size,
        # e[2]=uncompressed size, e[3]=delta base rev, e[5]/e[6]=parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify what the delta base is relative to this rev
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, the base is always prev (or self)
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains consecutively by first-seen base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: slice the chain into disk hunks
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
846 846
847 847
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # deprecated --nodates overrides --dates when explicitly given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        # ent is (state, mode, size, mtime); mtime == -1 means "unset"
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit of the recorded st_mode
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891 891
892 892
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):
        # legacy tree-walking discovery (pre-setdiscovery protocol)

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to its heads, as new-style discovery does
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
997 997
998 998
# Buffer size (4 KiB) used by debugdownload when streaming a resource.
_chunksize = 4 << 10
1000 1000
1001 1001
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource named by ``url`` is fetched through Mercurial's URL
    handling (so proxy/auth configuration applies) and streamed either to
    the ui or, with --output, to the named file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # Stream in fixed-size chunks to keep memory bounded for large
        # downloads.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Close the response handle on every path; the original leaked
        # ``fh`` (only the output file was ever closed).
        fh.close()
        if output:
            dest.close()
1021 1021
1022 1022
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        # Modules loaded from a frozen/oxidized binary may not carry a
        # __file__ attribute; report no source location instead of
        # raising AttributeError (same fix as applied to debuginstall).
        extsource = None
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
        if isinternal or hgver in exttestedwith:
            fm.plain(b'\n')
        elif not exttestedwith:
            fm.plain(_(b' (untested!)\n'))
        else:
            # flag the newest Mercurial version the extension declares
            # itself tested with
            lasttestedversion = exttestedwith[-1]
            fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1079 1079
1080 1080
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # imported here so merely loading this module does not pull in the
    # fileset machinery
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # Successive transformations applied to the parsed tree; each stage can
    # be printed with --show-stage NAME (or 'all').
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare-parsed tree under --verbose is printed without a
            # stage header for backward compatibility
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect candidate file names the matcher will be evaluated against:
    # all revisions (--all-files), the working directory, or just the
    # requested revision.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1176 1176
1177 1177
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the widest variant name, but never narrower than the
    # 'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values pass through unchanged; other (boolean)
            # values are rendered as yes/no for the plain formatter
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so mismatches between the repo's actual format and
        # the configured/default value can be highlighted
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1248 1248
1249 1249
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result exactly as before
        return flag and b'yes' or b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway file; some filesystems may
    # refuse the temp file entirely, in which case we report '(unknown)'.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1272 1272
1273 1273
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name onto the on-disk bundle format
    # marker understood by bundle2.writebundle
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1320 1320
1321 1321
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file may be ignored directly, or because one of its
                # containing directories is ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report the exact rule (file, line number, pattern) that
                # caused the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1370 1370
1371 1371
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # --debug prints full-length hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the node id width from the first revision so the header
    # columns line up (default 12 for an empty store)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1411 1411
1412 1412
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        parents = store.parents(store.node(rev))
        # first parent edge is always emitted; the second only exists for
        # merges (non-null p2)
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1431 1431
1432 1432
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # prime the index before querying its stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1442 1442
1443 1443
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        _(b" %s\n (check that your locale is properly set)\n"),
        err,
    )

    # Python
    # Modules may lack a __file__ attribute when running from a frozen or
    # oxidized (PyOxidizer) binary; fall back to the executable path
    # instead of raising AttributeError.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same __file__ caveat as above, this time for this very module.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            # importing the accelerated extensions is the actual check;
            # any failure is reported below
            if cext:
                from .cext import (
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        m = templater.templatepath(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # give loaded extensions a chance to report their own install problems
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1711 1723
1712 1724
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one '1'/'0' digit per queried id, all on a single line
    digits = [b"1" if known else b"0" for known in flags]
    ui.write(b"%s\n" % b"".join(digits))
1726 1738
1727 1739
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: the real implementation lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1732 1744
1733 1745
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock / --force-wlock: delete the lock files outright and
    # return immediately (dangerous, per the option help)
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    locks = []
    try:
        # --set-wlock / --set-lock: acquire without waiting (False) and
        # hold until the user acknowledges the prompt
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # NOTE(review): 'release' is not defined in this block; it is
        # presumably imported elsewhere in this module (e.g. from the lock
        # module) — confirm against the full file.
        release(*locks)

    # no modifying option was given: report current lock state
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1845 1857
1846 1858
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache only exists on revlog-backed manifest storage
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    # neither --clear nor --add: display cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
1918 1930
1919 1931
@command(b'debugmergestate', [], b'')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # Render the null hash as the literal string 'null' for readability.
    def _hashornull(h):
        if h == nullhex:
            return b'null'
        else:
            return h

    # Dump the records of one on-disk merge-state format version.  Reads
    # v1records/v2records from the enclosing scope (set up below).
    def printrecords(version):
        ui.writenoi18n(b'* version %d records\n' % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == b'L':
                ui.writenoi18n(b'local: %s\n' % record)
            elif rtype == b'O':
                ui.writenoi18n(b'other: %s\n' % record)
            elif rtype == b'm':
                # merge driver record: driver name + its state, NUL-separated
                driver, mdstate = record.split(b'\0', 1)
                ui.writenoi18n(
                    b'merge driver: %s (state "%s")\n' % (driver, mdstate)
                )
            elif rtype in b'FDC':
                # per-file merge records; fields are NUL-separated
                r = record.split(b'\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    # v1 records do not carry the "other" node
                    onode = b'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.writenoi18n(
                    b'file: %s (record type "%s", state "%s", hash %s)\n'
                    % (f, rtype, state, _hashornull(hash))
                )
                ui.writenoi18n(
                    b' local path: %s (flags "%s")\n' % (lfile, flags)
                )
                ui.writenoi18n(
                    b' ancestor path: %s (node %s)\n'
                    % (afile, _hashornull(anode))
                )
                ui.writenoi18n(
                    b' other path: %s (node %s)\n'
                    % (ofile, _hashornull(onode))
                )
            elif rtype == b'f':
                # per-file extras: filename, then alternating key/value pairs
                filename, rawextras = record.split(b'\0', 1)
                extras = rawextras.split(b'\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.writenoi18n(
                    b'file extras: %s (%s)\n'
                    % (filename, b', '.join(extrastrings))
                )
            elif rtype == b'l':
                # merge labels: local, other, and optionally base
                labels = record.split(b'\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.writenoi18n(b'labels:\n')
                ui.write((b' local: %s\n' % labels[0]))
                ui.write((b' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((b' base: %s\n' % labels[2]))
            else:
                # unknown record type: dump it raw with NULs made visible
                ui.writenoi18n(
                    b'unrecognized entry: %s\t%s\n'
                    % (rtype, record.replace(b'\0', b'\t'))
                )

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = b'LOml'

    # Sort key: known record types (L, O, m, l) first in that fixed order,
    # everything else after, ordered by record payload.
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)

    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.writenoi18n(b'no merge state found\n')
    elif not v2records:
        ui.notenoi18n(b'no version 2 merge state\n')
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.notenoi18n(b'v1 and v2 states match: using v2\n')
        printrecords(2)
    else:
        # v1 and v2 disagree; prefer v1, and show v2 too under --verbose
        ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
        printrecords(1)
        if ui.verbose:
            printrecords(2)
2034 2046
2035 2047
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in pycompat.iteritems(repo.names):
        if name != b'branches':
            names.update(ns.listnames(repo))
    # only branches that are still open are offered for completion
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)

    # an empty argument list means "complete everything"
    patterns = args or [b'']
    completions = set()
    for pattern in patterns:
        for candidate in names:
            if candidate.startswith(pattern):
                completions.add(candidate)
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2058 2070
2059 2071
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    # Parse a full hexadecimal node id; aborts on anything shorter or
    # otherwise malformed.
    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: --delete takes marker indices, validated up front.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Creation mode: a precursor argument means we create a new marker.
    if precursor is not None:
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction management: the transaction must be
        # released (and the lock after it) no matter what goes wrong.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally filtered by --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index needs positions within the full marker list, so we
            # iterate everything and filter while displaying.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2207 2219
2208 2220
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the target revision; with no -r, default to the working
    # directory context.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst, src in copymap.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2221 2233
2222 2234
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # This function was previously misnamed ``debugp1copies``, shadowing
    # the real debugp1copies at module level.  The command registration
    # (via the decorator above) is keyed on the command name, so behavior
    # of both commands is unchanged by the rename.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2235 2247
2236 2248
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    # Return (files, dirs) completion candidates for one path fragment.
    # 'acceptable' is a string of dirstate state characters a file must
    # match to be offered.
    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # paths outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; convert the spec to match on
        # platforms with a different separator
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # bind the bound methods once; this loop runs per dirstate entry
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator and
                # offer the directory instead
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the flags; empty
    # means "no filter requested" and falls back to b'nmar' below
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2305 2317
2306 2318
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, limit to the requested patterns, and print
    # the copy mapping sorted by destination.
    srcctx = scmutil.revsingle(repo, rev1)
    dstctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(srcctx, pats, opts)
    copymap = copies.pathcopies(srcctx, dstctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2320 2332
2321 2333
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
2340 2352
2341 2353
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied via a temporary ui.forcemerge override so that
    # _picktool sees it the same way a real merge would
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # under -v, report the other tool-selection inputs (steps 1-2 and 4)
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # without --debug, capture (and discard) _picktool's own
                # output so only the FILE = MERGETOOL lines are shown
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2429 2441
2430 2442
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        listing = target.listkeys(namespace)
        for k, v in sorted(pycompat.iteritems(listing)):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )
        return

    # update mode: conditionally move the key from old to new
    key, old, new = keyinfo
    callargs = {
        b'namespace': namespace,
        b'key': key,
        b'old': old,
        b'new': new,
    }
    with target.commandexecutor() as executor:
        result = executor.callcommand(b'pushkey', callargs).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2462 2474
2463 2475
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display and compare the parent vectors of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # classify the relation between the two vectors
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # None of the comparisons matched.  Previously this left 'rel'
        # unbound, so the format line below crashed with a NameError;
        # report the relation as unknown instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2490 2502
2491 2503
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None tells rebuild() to reset every file; --minimal narrows it
        # down to the inconsistent ones only (see command doc).
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # tracked in the manifest but unknown to the dirstate
            manifestonly = inmanifest - indirstate
            # known to the dirstate but absent from the manifest, minus
            # files that are deliberately marked as added
            dsonly = indirstate - inmanifest
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2539 2551
2540 2552
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual rebuild logic lives in the repair module.
    repair.rebuildfncache(ui, repo)
2545 2557
2546 2558
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # rename metadata comes from the filelog entry for this filenode
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
2566 2578
2567 2579
2568 2580 @command(
2569 2581 b'debugrevlog',
2570 2582 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2571 2583 _(b'-c|-m|FILE'),
2572 2584 optionalrepo=True,
2573 2585 )
2574 2586 def debugrevlog(ui, repo, file_=None, **opts):
2575 2587 """show data and statistics about a revlog"""
2576 2588 opts = pycompat.byteskwargs(opts)
2577 2589 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2578 2590
2579 2591 if opts.get(b"dump"):
2580 2592 numrevs = len(r)
2581 2593 ui.write(
2582 2594 (
2583 2595 b"# rev p1rev p2rev start end deltastart base p1 p2"
2584 2596 b" rawsize totalsize compression heads chainlen\n"
2585 2597 )
2586 2598 )
2587 2599 ts = 0
2588 2600 heads = set()
2589 2601
2590 2602 for rev in pycompat.xrange(numrevs):
2591 2603 dbase = r.deltaparent(rev)
2592 2604 if dbase == -1:
2593 2605 dbase = rev
2594 2606 cbase = r.chainbase(rev)
2595 2607 clen = r.chainlen(rev)
2596 2608 p1, p2 = r.parentrevs(rev)
2597 2609 rs = r.rawsize(rev)
2598 2610 ts = ts + rs
2599 2611 heads -= set(r.parentrevs(rev))
2600 2612 heads.add(rev)
2601 2613 try:
2602 2614 compression = ts / r.end(rev)
2603 2615 except ZeroDivisionError:
2604 2616 compression = 0
2605 2617 ui.write(
2606 2618 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2607 2619 b"%11d %5d %8d\n"
2608 2620 % (
2609 2621 rev,
2610 2622 p1,
2611 2623 p2,
2612 2624 r.start(rev),
2613 2625 r.end(rev),
2614 2626 r.start(dbase),
2615 2627 r.start(cbase),
2616 2628 r.start(p1),
2617 2629 r.start(p2),
2618 2630 rs,
2619 2631 ts,
2620 2632 compression,
2621 2633 len(heads),
2622 2634 clen,
2623 2635 )
2624 2636 )
2625 2637 return 0
2626 2638
2627 2639 v = r.version
2628 2640 format = v & 0xFFFF
2629 2641 flags = []
2630 2642 gdelta = False
2631 2643 if v & revlog.FLAG_INLINE_DATA:
2632 2644 flags.append(b'inline')
2633 2645 if v & revlog.FLAG_GENERALDELTA:
2634 2646 gdelta = True
2635 2647 flags.append(b'generaldelta')
2636 2648 if not flags:
2637 2649 flags = [b'(none)']
2638 2650
2639 2651 ### tracks merge vs single parent
2640 2652 nummerges = 0
2641 2653
2642 2654 ### tracks ways the "delta" are build
2643 2655 # nodelta
2644 2656 numempty = 0
2645 2657 numemptytext = 0
2646 2658 numemptydelta = 0
2647 2659 # full file content
2648 2660 numfull = 0
2649 2661 # intermediate snapshot against a prior snapshot
2650 2662 numsemi = 0
2651 2663 # snapshot count per depth
2652 2664 numsnapdepth = collections.defaultdict(lambda: 0)
2653 2665 # delta against previous revision
2654 2666 numprev = 0
2655 2667 # delta against first or second parent (not prev)
2656 2668 nump1 = 0
2657 2669 nump2 = 0
2658 2670 # delta against neither prev nor parents
2659 2671 numother = 0
2660 2672 # delta against prev that are also first or second parent
2661 2673 # (details of `numprev`)
2662 2674 nump1prev = 0
2663 2675 nump2prev = 0
2664 2676
2665 2677 # data about delta chain of each revs
2666 2678 chainlengths = []
2667 2679 chainbases = []
2668 2680 chainspans = []
2669 2681
2670 2682 # data about each revision
2671 2683 datasize = [None, 0, 0]
2672 2684 fullsize = [None, 0, 0]
2673 2685 semisize = [None, 0, 0]
2674 2686 # snapshot count per depth
2675 2687 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2676 2688 deltasize = [None, 0, 0]
2677 2689 chunktypecounts = {}
2678 2690 chunktypesizes = {}
2679 2691
2680 2692 def addsize(size, l):
2681 2693 if l[0] is None or size < l[0]:
2682 2694 l[0] = size
2683 2695 if size > l[1]:
2684 2696 l[1] = size
2685 2697 l[2] += size
2686 2698
2687 2699 numrevs = len(r)
2688 2700 for rev in pycompat.xrange(numrevs):
2689 2701 p1, p2 = r.parentrevs(rev)
2690 2702 delta = r.deltaparent(rev)
2691 2703 if format > 0:
2692 2704 addsize(r.rawsize(rev), datasize)
2693 2705 if p2 != nullrev:
2694 2706 nummerges += 1
2695 2707 size = r.length(rev)
2696 2708 if delta == nullrev:
2697 2709 chainlengths.append(0)
2698 2710 chainbases.append(r.start(rev))
2699 2711 chainspans.append(size)
2700 2712 if size == 0:
2701 2713 numempty += 1
2702 2714 numemptytext += 1
2703 2715 else:
2704 2716 numfull += 1
2705 2717 numsnapdepth[0] += 1
2706 2718 addsize(size, fullsize)
2707 2719 addsize(size, snapsizedepth[0])
2708 2720 else:
2709 2721 chainlengths.append(chainlengths[delta] + 1)
2710 2722 baseaddr = chainbases[delta]
2711 2723 revaddr = r.start(rev)
2712 2724 chainbases.append(baseaddr)
2713 2725 chainspans.append((revaddr - baseaddr) + size)
2714 2726 if size == 0:
2715 2727 numempty += 1
2716 2728 numemptydelta += 1
2717 2729 elif r.issnapshot(rev):
2718 2730 addsize(size, semisize)
2719 2731 numsemi += 1
2720 2732 depth = r.snapshotdepth(rev)
2721 2733 numsnapdepth[depth] += 1
2722 2734 addsize(size, snapsizedepth[depth])
2723 2735 else:
2724 2736 addsize(size, deltasize)
2725 2737 if delta == rev - 1:
2726 2738 numprev += 1
2727 2739 if delta == p1:
2728 2740 nump1prev += 1
2729 2741 elif delta == p2:
2730 2742 nump2prev += 1
2731 2743 elif delta == p1:
2732 2744 nump1 += 1
2733 2745 elif delta == p2:
2734 2746 nump2 += 1
2735 2747 elif delta != nullrev:
2736 2748 numother += 1
2737 2749
2738 2750 # Obtain data on the raw chunks in the revlog.
2739 2751 if util.safehasattr(r, b'_getsegmentforrevs'):
2740 2752 segment = r._getsegmentforrevs(rev, rev)[1]
2741 2753 else:
2742 2754 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2743 2755 if segment:
2744 2756 chunktype = bytes(segment[0:1])
2745 2757 else:
2746 2758 chunktype = b'empty'
2747 2759
2748 2760 if chunktype not in chunktypecounts:
2749 2761 chunktypecounts[chunktype] = 0
2750 2762 chunktypesizes[chunktype] = 0
2751 2763
2752 2764 chunktypecounts[chunktype] += 1
2753 2765 chunktypesizes[chunktype] += size
2754 2766
2755 2767 # Adjust size min value for empty cases
2756 2768 for size in (datasize, fullsize, semisize, deltasize):
2757 2769 if size[0] is None:
2758 2770 size[0] = 0
2759 2771
2760 2772 numdeltas = numrevs - numfull - numempty - numsemi
2761 2773 numoprev = numprev - nump1prev - nump2prev
2762 2774 totalrawsize = datasize[2]
2763 2775 datasize[2] /= numrevs
2764 2776 fulltotal = fullsize[2]
2765 2777 if numfull == 0:
2766 2778 fullsize[2] = 0
2767 2779 else:
2768 2780 fullsize[2] /= numfull
2769 2781 semitotal = semisize[2]
2770 2782 snaptotal = {}
2771 2783 if numsemi > 0:
2772 2784 semisize[2] /= numsemi
2773 2785 for depth in snapsizedepth:
2774 2786 snaptotal[depth] = snapsizedepth[depth][2]
2775 2787 snapsizedepth[depth][2] /= numsnapdepth[depth]
2776 2788
2777 2789 deltatotal = deltasize[2]
2778 2790 if numdeltas > 0:
2779 2791 deltasize[2] /= numdeltas
2780 2792 totalsize = fulltotal + semitotal + deltatotal
2781 2793 avgchainlen = sum(chainlengths) / numrevs
2782 2794 maxchainlen = max(chainlengths)
2783 2795 maxchainspan = max(chainspans)
2784 2796 compratio = 1
2785 2797 if totalsize:
2786 2798 compratio = totalrawsize / totalsize
2787 2799
2788 2800 basedfmtstr = b'%%%dd\n'
2789 2801 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2790 2802
2791 2803 def dfmtstr(max):
2792 2804 return basedfmtstr % len(str(max))
2793 2805
2794 2806 def pcfmtstr(max, padding=0):
2795 2807 return basepcfmtstr % (len(str(max)), b' ' * padding)
2796 2808
2797 2809 def pcfmt(value, total):
2798 2810 if total:
2799 2811 return (value, 100 * float(value) / total)
2800 2812 else:
2801 2813 return value, 100.0
2802 2814
2803 2815 ui.writenoi18n(b'format : %d\n' % format)
2804 2816 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2805 2817
2806 2818 ui.write(b'\n')
2807 2819 fmt = pcfmtstr(totalsize)
2808 2820 fmt2 = dfmtstr(totalsize)
2809 2821 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2810 2822 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2811 2823 ui.writenoi18n(
2812 2824 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2813 2825 )
2814 2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2815 2827 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2816 2828 ui.writenoi18n(
2817 2829 b' text : '
2818 2830 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2819 2831 )
2820 2832 ui.writenoi18n(
2821 2833 b' delta : '
2822 2834 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2823 2835 )
2824 2836 ui.writenoi18n(
2825 2837 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2826 2838 )
2827 2839 for depth in sorted(numsnapdepth):
2828 2840 ui.write(
2829 2841 (b' lvl-%-3d : ' % depth)
2830 2842 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2831 2843 )
2832 2844 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2833 2845 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2834 2846 ui.writenoi18n(
2835 2847 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2836 2848 )
2837 2849 for depth in sorted(numsnapdepth):
2838 2850 ui.write(
2839 2851 (b' lvl-%-3d : ' % depth)
2840 2852 + fmt % pcfmt(snaptotal[depth], totalsize)
2841 2853 )
2842 2854 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2843 2855
2844 2856 def fmtchunktype(chunktype):
2845 2857 if chunktype == b'empty':
2846 2858 return b' %s : ' % chunktype
2847 2859 elif chunktype in pycompat.bytestr(string.ascii_letters):
2848 2860 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2849 2861 else:
2850 2862 return b' 0x%s : ' % hex(chunktype)
2851 2863
2852 2864 ui.write(b'\n')
2853 2865 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2854 2866 for chunktype in sorted(chunktypecounts):
2855 2867 ui.write(fmtchunktype(chunktype))
2856 2868 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2857 2869 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2858 2870 for chunktype in sorted(chunktypecounts):
2859 2871 ui.write(fmtchunktype(chunktype))
2860 2872 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2861 2873
2862 2874 ui.write(b'\n')
2863 2875 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2864 2876 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2865 2877 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2866 2878 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2867 2879 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2868 2880
2869 2881 if format > 0:
2870 2882 ui.write(b'\n')
2871 2883 ui.writenoi18n(
2872 2884 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2873 2885 % tuple(datasize)
2874 2886 )
2875 2887 ui.writenoi18n(
2876 2888 b'full revision size (min/max/avg) : %d / %d / %d\n'
2877 2889 % tuple(fullsize)
2878 2890 )
2879 2891 ui.writenoi18n(
2880 2892 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2881 2893 % tuple(semisize)
2882 2894 )
2883 2895 for depth in sorted(snapsizedepth):
2884 2896 if depth == 0:
2885 2897 continue
2886 2898 ui.writenoi18n(
2887 2899 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2888 2900 % ((depth,) + tuple(snapsizedepth[depth]))
2889 2901 )
2890 2902 ui.writenoi18n(
2891 2903 b'delta size (min/max/avg) : %d / %d / %d\n'
2892 2904 % tuple(deltasize)
2893 2905 )
2894 2906
2895 2907 if numdeltas > 0:
2896 2908 ui.write(b'\n')
2897 2909 fmt = pcfmtstr(numdeltas)
2898 2910 fmt2 = pcfmtstr(numdeltas, 4)
2899 2911 ui.writenoi18n(
2900 2912 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2901 2913 )
2902 2914 if numprev > 0:
2903 2915 ui.writenoi18n(
2904 2916 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2905 2917 )
2906 2918 ui.writenoi18n(
2907 2919 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2908 2920 )
2909 2921 ui.writenoi18n(
2910 2922 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2911 2923 )
2912 2924 if gdelta:
2913 2925 ui.writenoi18n(
2914 2926 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2915 2927 )
2916 2928 ui.writenoi18n(
2917 2929 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2918 2930 )
2919 2931 ui.writenoi18n(
2920 2932 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2921 2933 )
2922 2934
2923 2935
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only the two historical index dump layouts are supported.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full-length hex node ids; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Size the node id columns from one rendered id (all ids render
        # at the same width for a given shortfn).
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column header matching the chosen format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One output row per revision in the revlog.
    for i in r:
        node = r.node(i)
        if format == 0:
            # Format 0 shows parents as node ids; fall back to null ids
            # when the parents cannot be resolved.
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 shows parents as revision numbers and includes the
            # per-revision storage flags.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3037 3049
3038 3050
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline, in order; each stage transforms the
    # tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimization) stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = set(n for n, f in stages)

    # Stages whose tree is always printed vs. printed only when it differs
    # from the last tree printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run every stage, remembering each intermediate tree so that
    # --verify-optimized can evaluate both the analyzed and optimized forms.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the pre- and post-optimization trees and compare.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # The results differ: render a unified-diff style listing of the
        # revision sequences and signal failure via exit status 1.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the resulting revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3170 3182
3171 3183
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Validate the option combination before touching any file handles.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(fd, 'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 1)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 1)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3217 3229
3218 3230
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions before taking the lock; a missing second
    # revision defaults to the null changeset.
    parents = [
        scmutil.revsingle(repo, rev1).node(),
        scmutil.revsingle(repo, rev2, b'null').node(),
    ]

    with repo.wlock():
        repo.setparents(*parents)
3236 3248
3237 3249
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not
    # a file path, so shuffle the arguments accordingly.
    # BUG FIX: the error paths previously reported b'debugdata' (copy-paste
    # from the debugdata command); report this command's own name instead.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print a stable (sorted-by-key) summary; full values only with -v.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3264 3276
3265 3277
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    # Fall back to the repository's 'default' path when no source was given.
    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_(b"only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_(b"malformed port number in URL"))

    from . import win32

    # Certificate verification is deliberately disabled: we only want the
    # peer's certificate bytes, not a validated connection.
    sock = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        if win32.checkcertificatechain(cert, build=False):
            ui.status(_(b'full certificate chain is available\n'))
        else:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if win32.checkcertificatechain(cert):
                ui.status(_(b'done.\n'))
            else:
                ui.status(_(b'failed.\n'))
    finally:
        sock.close()
3335 3347
3336 3348
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print each subrepository path together with its recorded source URL
    # and pinned revision, sorted by path for stable output.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3348 3360
3349 3361
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # successorssets() can share its internal cache across calls, so use a
    # single dict for all requested revisions.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # An empty set (pruned changeset) still produces a blank line.
            if succsset:
                rendered = [short(node) for node in succsset]
                ui.write(b' ' + b' '.join(rendered))
            ui.write(b'\n')
3404 3416
3405 3417
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Revisions require a repository even though the command itself
        # does not (optionalrepo=True).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE keyword definitions. An empty key and the key
    # b'ui' are rejected (b'ui' presumably clashes with the built-in
    # template resource of the same name — verify against formatter docs).
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree and, only if it differs, the alias-expanded
        # version of the tree.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3469 3481
3470 3482
@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    if r is None:
        # getpass() can return None (e.g. non-interactive stdin); render a
        # placeholder instead of crashing on b'%s' % None under py3.
        r = b"<default response>"
    # BUG FIX: the output label was misspelled b'respose'; use b'response'
    # for consistency with debuguiprompt below.
    ui.writenoi18n(b'response: %s\n' % r)
3481 3493
3482 3494
@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed so tests can inspect prompt handling.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3493 3505
3494 3506
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock so cache
    # generation sees a stable repository.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3500 3512
3501 3513
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # All of the heavy lifting lives in the upgrade module; this command is
    # a thin pass-through shim.
    return upgrade.upgraderepo(
        ui,
        repo,
        run=run,
        optimize=optimize,
        backup=backup,
        **opts
    )
3548 3560
3549 3561
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optional display-time path separator normalization (ui.slash).
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Size the columns to the longest repo-relative and cwd-relative paths
    # so rows line up. (The locals were renamed from `abs`, which shadowed
    # the builtin, and the throwaway lists became generator expressions.)
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            f(repo.pathto(fname)),
            m.exact(fname) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
3576 3588
3577 3589
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Render divergent nodes (if any) as "hex (phase)" pairs followed
        # by a trailing space so they slot into the message below.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3595 3607
3596 3608
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # The connection options were consumed by hg.peer(); strip them so only
    # the command's own (non-empty) arguments get forwarded to the peer.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
3624 3636
3625 3637
3626 3638 def _parsewirelangblocks(fh):
3627 3639 activeaction = None
3628 3640 blocklines = []
3629 3641 lastindent = 0
3630 3642
3631 3643 for line in fh:
3632 3644 line = line.rstrip()
3633 3645 if not line:
3634 3646 continue
3635 3647
3636 3648 if line.startswith(b'#'):
3637 3649 continue
3638 3650
3639 3651 if not line.startswith(b' '):
3640 3652 # New block. Flush previous one.
3641 3653 if activeaction:
3642 3654 yield activeaction, blocklines
3643 3655
3644 3656 activeaction = line
3645 3657 blocklines = []
3646 3658 lastindent = 0
3647 3659 continue
3648 3660
3649 3661 # Else we start with an indent.
3650 3662
3651 3663 if not activeaction:
3652 3664 raise error.Abort(_(b'indented line outside of block'))
3653 3665
3654 3666 indent = len(line) - len(line.lstrip())
3655 3667
3656 3668 # If this line is indented more than the last line, concatenate it.
3657 3669 if indent > lastindent and blocklines:
3658 3670 blocklines[-1] += line.lstrip()
3659 3671 else:
3660 3672 blocklines.append(line)
3661 3673 lastindent = indent
3662 3674
3663 3675 # Flush last block.
3664 3676 if activeaction:
3665 3677 yield activeaction, blocklines
3666 3678
3667 3679
3668 3680 @command(
3669 3681 b'debugwireproto',
3670 3682 [
3671 3683 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3672 3684 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3673 3685 (
3674 3686 b'',
3675 3687 b'noreadstderr',
3676 3688 False,
3677 3689 _(b'do not read from stderr of the remote'),
3678 3690 ),
3679 3691 (
3680 3692 b'',
3681 3693 b'nologhandshake',
3682 3694 False,
3683 3695 _(b'do not log I/O related to the peer handshake'),
3684 3696 ),
3685 3697 ]
3686 3698 + cmdutil.remoteopts,
3687 3699 _(b'[PATH]'),
3688 3700 optionalrepo=True,
3689 3701 )
3690 3702 def debugwireproto(ui, repo, path=None, **opts):
3691 3703 """send wire protocol commands to a server
3692 3704
3693 3705 This command can be used to issue wire protocol commands to remote
3694 3706 peers and to debug the raw data being exchanged.
3695 3707
3696 3708 ``--localssh`` will start an SSH server against the current repository
3697 3709 and connect to that. By default, the connection will perform a handshake
3698 3710 and establish an appropriate peer instance.
3699 3711
3700 3712 ``--peer`` can be used to bypass the handshake protocol and construct a
3701 3713 peer instance using the specified class type. Valid values are ``raw``,
3702 3714 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3703 3715 raw data payloads and don't support higher-level command actions.
3704 3716
3705 3717 ``--noreadstderr`` can be used to disable automatic reading from stderr
3706 3718 of the peer (for SSH connections only). Disabling automatic reading of
3707 3719 stderr is useful for making output more deterministic.
3708 3720
3709 3721 Commands are issued via a mini language which is specified via stdin.
3710 3722 The language consists of individual actions to perform. An action is
3711 3723 defined by a block. A block is defined as a line with no leading
3712 3724 space followed by 0 or more lines with leading space. Blocks are
3713 3725 effectively a high-level command with additional metadata.
3714 3726
3715 3727 Lines beginning with ``#`` are ignored.
3716 3728
3717 3729 The following sections denote available actions.
3718 3730
3719 3731 raw
3720 3732 ---
3721 3733
3722 3734 Send raw data to the server.
3723 3735
3724 3736 The block payload contains the raw data to send as one atomic send
3725 3737 operation. The data may not actually be delivered in a single system
3726 3738 call: it depends on the abilities of the transport being used.
3727 3739
3728 3740 Each line in the block is de-indented and concatenated. Then, that
3729 3741 value is evaluated as a Python b'' literal. This allows the use of
3730 3742 backslash escaping, etc.
3731 3743
3732 3744 raw+
3733 3745 ----
3734 3746
3735 3747 Behaves like ``raw`` except flushes output afterwards.
3736 3748
3737 3749 command <X>
3738 3750 -----------
3739 3751
3740 3752 Send a request to run a named command, whose name follows the ``command``
3741 3753 string.
3742 3754
3743 3755 Arguments to the command are defined as lines in this block. The format of
3744 3756 each line is ``<key> <value>``. e.g.::
3745 3757
3746 3758 command listkeys
3747 3759 namespace bookmarks
3748 3760
3749 3761 If the value begins with ``eval:``, it will be interpreted as a Python
3750 3762 literal expression. Otherwise values are interpreted as Python b'' literals.
3751 3763 This allows sending complex types and encoding special byte sequences via
3752 3764 backslash escaping.
3753 3765
3754 3766 The following arguments have special meaning:
3755 3767
3756 3768 ``PUSHFILE``
3757 3769 When defined, the *push* mechanism of the peer will be used instead
3758 3770 of the static request-response mechanism and the content of the
3759 3771 file specified in the value of this argument will be sent as the
3760 3772 command payload.
3761 3773
3762 3774 This can be used to submit a local bundle file to the remote.
3763 3775
3764 3776 batchbegin
3765 3777 ----------
3766 3778
3767 3779 Instruct the peer to begin a batched send.
3768 3780
3769 3781 All ``command`` blocks are queued for execution until the next
3770 3782 ``batchsubmit`` block.
3771 3783
3772 3784 batchsubmit
3773 3785 -----------
3774 3786
3775 3787 Submit previously queued ``command`` blocks as a batch request.
3776 3788
3777 3789 This action MUST be paired with a ``batchbegin`` action.
3778 3790
3779 3791 httprequest <method> <path>
3780 3792 ---------------------------
3781 3793
3782 3794 (HTTP peer only)
3783 3795
3784 3796 Send an HTTP request to the peer.
3785 3797
3786 3798 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3787 3799
3788 3800 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3789 3801 headers to add to the request. e.g. ``Accept: foo``.
3790 3802
3791 3803 The following arguments are special:
3792 3804
3793 3805 ``BODYFILE``
3794 3806 The content of the file defined as the value to this argument will be
3795 3807 transferred verbatim as the HTTP request body.
3796 3808
3797 3809 ``frame <type> <flags> <payload>``
3798 3810 Send a unified protocol frame as part of the request body.
3799 3811
3800 3812 All frames will be collected and sent as the body to the HTTP
3801 3813 request.
3802 3814
3803 3815 close
3804 3816 -----
3805 3817
3806 3818 Close the connection to the server.
3807 3819
3808 3820 flush
3809 3821 -----
3810 3822
3811 3823 Flush data written to the server.
3812 3824
3813 3825 readavailable
3814 3826 -------------
3815 3827
3816 3828 Close the write end of the connection and read all available data from
3817 3829 the server.
3818 3830
3819 3831 If the connection to the server encompasses multiple pipes, we poll both
3820 3832 pipes and read available data.
3821 3833
3822 3834 readline
3823 3835 --------
3824 3836
3825 3837 Read a line of output from the server. If there are multiple output
3826 3838 pipes, reads only the main pipe.
3827 3839
3828 3840 ereadline
3829 3841 ---------
3830 3842
3831 3843 Like ``readline``, but read from the stderr pipe, if available.
3832 3844
3833 3845 read <X>
3834 3846 --------
3835 3847
3836 3848 ``read()`` N bytes from the server's main output pipe.
3837 3849
3838 3850 eread <X>
3839 3851 ---------
3840 3852
3841 3853 ``read()`` N bytes from the server's stderr pipe, if available.
3842 3854
3843 3855 Specifying Unified Frame-Based Protocol Frames
3844 3856 ----------------------------------------------
3845 3857
3846 3858 It is possible to emit a *Unified Frame-Based Protocol* by using special
3847 3859 syntax.
3848 3860
3849 3861 A frame is composed as a type, flags, and payload. These can be parsed
3850 3862 from a string of the form:
3851 3863
3852 3864 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3853 3865
3854 3866 ``request-id`` and ``stream-id`` are integers defining the request and
3855 3867 stream identifiers.
3856 3868
3857 3869 ``type`` can be an integer value for the frame type or the string name
3858 3870 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3859 3871 ``command-name``.
3860 3872
3861 3873 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3862 3874 components. Each component (and there can be just one) can be an integer
3863 3875 or a flag name for stream flags or frame flags, respectively. Values are
3864 3876 resolved to integers and then bitwise OR'd together.
3865 3877
3866 3878 ``payload`` represents the raw frame payload. If it begins with
3867 3879 ``cbor:``, the following string is evaluated as Python code and the
3868 3880 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3869 3881 as a Python byte string literal.
3870 3882 """
    opts = pycompat.byteskwargs(opts)

    # --- Option validation -------------------------------------------------
    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        # NOTE(review): the hint omits "http2" even though it is accepted
        # above — presumably intentional (experimental); confirm.
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    # Parse the entire mini-language program from stdin up front.
    blocks = list(_parsewirelangblocks(ui.fin))

    # Connection state; which of these are populated depends on the
    # transport selected below (local SSH subprocess vs. HTTP opener).
    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    # --- Peer/transport setup ---------------------------------------------
    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            # Raw mode: no peer object; actions talk to the pipes directly.
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            # Default: let the handshake decide the protocol version.
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

                if not isinstance(peer, httppeer.httpv2peer):
                    raise error.Abort(
                        _(
                            b'could not instantiate HTTP peer for '
                            b'wire protocol version 2'
                        ),
                        hint=_(
                            b'the server may not have the feature '
                            b'enabled or is not allowing this '
                            b'client version'
                        ),
                    )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    # Non-None while inside a batchbegin/batchsubmit pair; collects
    # (command, args) tuples instead of sending them immediately.
    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            # Parse "<key> <value>" argument lines; "eval:" prefix switches
            # the value to a Python literal expression.
            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            # Inside a batch: queue instead of sending now.
            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                # Wire protocol v2 responses need their objects decoded;
                # v1 responses are printed as-is.
                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                # NOTE(review): this message is missing the closing '"'
                # after "<path>" — cosmetic, but worth fixing upstream.
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # NOTE(review): this passes the whole 2-element split
                    # list (still containing the b'BODYFILE' prefix) as the
                    # filename — presumably should be
                    # ``line.split(b' ', 1)[1]``; also the mode is b'rb'
                    # whereas the PUSHFILE branch above uses 'rb'. Confirm
                    # and fix upstream.
                    with open(line.split(b' ', 1), b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            # Any "frame" lines override a BODYFILE body.
            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            # NOTE(review): stdin is closed here but only stdout/stderr were
            # checked above — stdin could be None for an HTTP peer; verify.
            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    # --- Teardown ----------------------------------------------------------
    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now