debugcommands: don't shadow the error module...
Augie Fackler
r44034:72b454fa default
@@ -1,4266 +1,4266
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 encoding,
48 48 error,
49 49 exchange,
50 50 extensions,
51 51 filemerge,
52 52 filesetlang,
53 53 formatter,
54 54 hg,
55 55 httppeer,
56 56 localrepo,
57 57 lock as lockmod,
58 58 logcmdutil,
59 59 merge as mergemod,
60 60 obsolete,
61 61 obsutil,
62 62 pathutil,
63 63 phases,
64 64 policy,
65 65 pvec,
66 66 pycompat,
67 67 registrar,
68 68 repair,
69 69 revlog,
70 70 revset,
71 71 revsetlang,
72 72 scmutil,
73 73 setdiscovery,
74 74 simplemerge,
75 75 sshpeer,
76 76 sslutil,
77 77 streamclone,
78 78 templater,
79 79 treediscovery,
80 80 upgrade,
81 81 url as urlmod,
82 82 util,
83 83 vfs as vfsmod,
84 84 wireprotoframing,
85 85 wireprotoserver,
86 86 wireprotov2peer,
87 87 )
88 88 from .utils import (
89 89 cborutil,
90 90 compression,
91 91 dateutil,
92 92 procutil,
93 93 stringutil,
94 94 )
95 95
96 96 from .revlogutils import deltas as deltautil
97 97
98 98 release = lockmod.release
99 99
100 100 command = registrar.command()
101 101
102 102
103 103 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
104 104 def debugancestor(ui, repo, *args):
105 105 """find the ancestor revision of two revisions in a given index"""
106 106 if len(args) == 3:
107 107 index, rev1, rev2 = args
108 108 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
109 109 lookup = r.lookup
110 110 elif len(args) == 2:
111 111 if not repo:
112 112 raise error.Abort(
113 113 _(b'there is no Mercurial repository here (.hg not found)')
114 114 )
115 115 rev1, rev2 = args
116 116 r = repo.changelog
117 117 lookup = repo.lookup
118 118 else:
119 119 raise error.Abort(_(b'either two or three arguments required'))
120 120 a = r.ancestor(lookup(rev1), lookup(rev2))
121 121 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
122 122
123 123
124 124 @command(b'debugapplystreamclonebundle', [], b'FILE')
125 125 def debugapplystreamclonebundle(ui, repo, fname):
126 126 """apply a stream clone bundle file"""
127 127 f = hg.openpath(ui, fname)
128 128 gen = exchange.readbundle(ui, f, fname)
129 129 gen.apply(repo)
130 130
131 131
132 132 @command(
133 133 b'debugbuilddag',
134 134 [
135 135 (
136 136 b'm',
137 137 b'mergeable-file',
138 138 None,
139 139 _(b'add single file mergeable changes'),
140 140 ),
141 141 (
142 142 b'o',
143 143 b'overwritten-file',
144 144 None,
145 145 _(b'add single file all revs overwrite'),
146 146 ),
147 147 (b'n', b'new-file', None, _(b'add new file at each rev')),
148 148 ],
149 149 _(b'[OPTION]... [TEXT]'),
150 150 )
151 151 def debugbuilddag(
152 152 ui,
153 153 repo,
154 154 text=None,
155 155 mergeable_file=False,
156 156 overwritten_file=False,
157 157 new_file=False,
158 158 ):
159 159 """builds a repo with a given DAG from scratch in the current empty repo
160 160
161 161 The description of the DAG is read from stdin if not given on the
162 162 command line.
163 163
164 164 Elements:
165 165
166 166 - "+n" is a linear run of n nodes based on the current default parent
167 167 - "." is a single node based on the current default parent
168 168 - "$" resets the default parent to null (implied at the start);
169 169 otherwise the default parent is always the last node created
170 170 - "<p" sets the default parent to the backref p
171 171 - "*p" is a fork at parent p, which is a backref
172 172 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
173 173 - "/p2" is a merge of the preceding node and p2
174 174 - ":tag" defines a local tag for the preceding node
175 175 - "@branch" sets the named branch for subsequent nodes
176 176 - "#...\\n" is a comment up to the end of the line
177 177
178 178 Whitespace between the above elements is ignored.
179 179
180 180 A backref is either
181 181
182 182 - a number n, which references the node curr-n, where curr is the current
183 183 node, or
184 184 - the name of a local tag you placed earlier using ":tag", or
185 185 - empty to denote the default parent.
186 186
187 187 All string-valued elements are either strictly alphanumeric, or must
188 188 be enclosed in double quotes ("..."), with "\\" as escape character.
189 189 """
190 190
191 191 if text is None:
192 192 ui.status(_(b"reading DAG from stdin\n"))
193 193 text = ui.fin.read()
194 194
195 195 cl = repo.changelog
196 196 if len(cl) > 0:
197 197 raise error.Abort(_(b'repository is not empty'))
198 198
199 199 # determine number of revs in DAG
200 200 total = 0
201 201 for type, data in dagparser.parsedag(text):
202 202 if type == b'n':
203 203 total += 1
204 204
205 205 if mergeable_file:
206 206 linesperrev = 2
207 207 # make a file with k lines per rev
208 208 initialmergedlines = [
209 209 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
210 210 ]
211 211 initialmergedlines.append(b"")
212 212
213 213 tags = []
214 214 progress = ui.makeprogress(
215 215 _(b'building'), unit=_(b'revisions'), total=total
216 216 )
217 217 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
218 218 at = -1
219 219 atbranch = b'default'
220 220 nodeids = []
221 221 id = 0
222 222 progress.update(id)
223 223 for type, data in dagparser.parsedag(text):
224 224 if type == b'n':
225 225 ui.note((b'node %s\n' % pycompat.bytestr(data)))
226 226 id, ps = data
227 227
228 228 files = []
229 229 filecontent = {}
230 230
231 231 p2 = None
232 232 if mergeable_file:
233 233 fn = b"mf"
234 234 p1 = repo[ps[0]]
235 235 if len(ps) > 1:
236 236 p2 = repo[ps[1]]
237 237 pa = p1.ancestor(p2)
238 238 base, local, other = [
239 239 x[fn].data() for x in (pa, p1, p2)
240 240 ]
241 241 m3 = simplemerge.Merge3Text(base, local, other)
242 242 ml = [l.strip() for l in m3.merge_lines()]
243 243 ml.append(b"")
244 244 elif at > 0:
245 245 ml = p1[fn].data().split(b"\n")
246 246 else:
247 247 ml = initialmergedlines
248 248 ml[id * linesperrev] += b" r%i" % id
249 249 mergedtext = b"\n".join(ml)
250 250 files.append(fn)
251 251 filecontent[fn] = mergedtext
252 252
253 253 if overwritten_file:
254 254 fn = b"of"
255 255 files.append(fn)
256 256 filecontent[fn] = b"r%i\n" % id
257 257
258 258 if new_file:
259 259 fn = b"nf%i" % id
260 260 files.append(fn)
261 261 filecontent[fn] = b"r%i\n" % id
262 262 if len(ps) > 1:
263 263 if not p2:
264 264 p2 = repo[ps[1]]
265 265 for fn in p2:
266 266 if fn.startswith(b"nf"):
267 267 files.append(fn)
268 268 filecontent[fn] = p2[fn].data()
269 269
270 270 def fctxfn(repo, cx, path):
271 271 if path in filecontent:
272 272 return context.memfilectx(
273 273 repo, cx, path, filecontent[path]
274 274 )
275 275 return None
276 276
277 277 if len(ps) == 0 or ps[0] < 0:
278 278 pars = [None, None]
279 279 elif len(ps) == 1:
280 280 pars = [nodeids[ps[0]], None]
281 281 else:
282 282 pars = [nodeids[p] for p in ps]
283 283 cx = context.memctx(
284 284 repo,
285 285 pars,
286 286 b"r%i" % id,
287 287 files,
288 288 fctxfn,
289 289 date=(id, 0),
290 290 user=b"debugbuilddag",
291 291 extra={b'branch': atbranch},
292 292 )
293 293 nodeid = repo.commitctx(cx)
294 294 nodeids.append(nodeid)
295 295 at = id
296 296 elif type == b'l':
297 297 id, name = data
298 298 ui.note((b'tag %s\n' % name))
299 299 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
300 300 elif type == b'a':
301 301 ui.note((b'branch %s\n' % data))
302 302 atbranch = data
303 303 progress.update(id)
304 304
305 305 if tags:
306 306 repo.vfs.write(b"localtags", b"".join(tags))
307 307
308 308
309 309 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
310 310 indent_string = b' ' * indent
311 311 if all:
312 312 ui.writenoi18n(
313 313 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
314 314 % indent_string
315 315 )
316 316
317 317 def showchunks(named):
318 318 ui.write(b"\n%s%s\n" % (indent_string, named))
319 319 for deltadata in gen.deltaiter():
320 320 node, p1, p2, cs, deltabase, delta, flags = deltadata
321 321 ui.write(
322 322 b"%s%s %s %s %s %s %d\n"
323 323 % (
324 324 indent_string,
325 325 hex(node),
326 326 hex(p1),
327 327 hex(p2),
328 328 hex(cs),
329 329 hex(deltabase),
330 330 len(delta),
331 331 )
332 332 )
333 333
334 334 chunkdata = gen.changelogheader()
335 335 showchunks(b"changelog")
336 336 chunkdata = gen.manifestheader()
337 337 showchunks(b"manifest")
338 338 for chunkdata in iter(gen.filelogheader, {}):
339 339 fname = chunkdata[b'filename']
340 340 showchunks(fname)
341 341 else:
342 342 if isinstance(gen, bundle2.unbundle20):
343 343 raise error.Abort(_(b'use debugbundle2 for this file'))
344 344 chunkdata = gen.changelogheader()
345 345 for deltadata in gen.deltaiter():
346 346 node, p1, p2, cs, deltabase, delta, flags = deltadata
347 347 ui.write(b"%s%s\n" % (indent_string, hex(node)))
348 348
349 349
350 350 def _debugobsmarkers(ui, part, indent=0, **opts):
351 351 """display version and markers contained in 'data'"""
352 352 opts = pycompat.byteskwargs(opts)
353 353 data = part.read()
354 354 indent_string = b' ' * indent
355 355 try:
356 356 version, markers = obsolete._readmarkers(data)
357 357 except error.UnknownVersion as exc:
358 358 msg = b"%sunsupported version: %s (%d bytes)\n"
359 359 msg %= indent_string, exc.version, len(data)
360 360 ui.write(msg)
361 361 else:
362 362 msg = b"%sversion: %d (%d bytes)\n"
363 363 msg %= indent_string, version, len(data)
364 364 ui.write(msg)
365 365 fm = ui.formatter(b'debugobsolete', opts)
366 366 for rawmarker in sorted(markers):
367 367 m = obsutil.marker(None, rawmarker)
368 368 fm.startitem()
369 369 fm.plain(indent_string)
370 370 cmdutil.showmarker(fm, m)
371 371 fm.end()
372 372
373 373
374 374 def _debugphaseheads(ui, data, indent=0):
375 375 """display version and markers contained in 'data'"""
376 376 indent_string = b' ' * indent
377 377 headsbyphase = phases.binarydecode(data)
378 378 for phase in phases.allphases:
379 379 for head in headsbyphase[phase]:
380 380 ui.write(indent_string)
381 381 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
382 382
383 383
384 384 def _quasirepr(thing):
385 385 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
386 386 return b'{%s}' % (
387 387 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
388 388 )
389 389 return pycompat.bytestr(repr(thing))
390 390
391 391
392 392 def _debugbundle2(ui, gen, all=None, **opts):
393 393 """lists the contents of a bundle2"""
394 394 if not isinstance(gen, bundle2.unbundle20):
395 395 raise error.Abort(_(b'not a bundle2 file'))
396 396 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
397 397 parttypes = opts.get('part_type', [])
398 398 for part in gen.iterparts():
399 399 if parttypes and part.type not in parttypes:
400 400 continue
401 401 msg = b'%s -- %s (mandatory: %r)\n'
402 402 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
403 403 if part.type == b'changegroup':
404 404 version = part.params.get(b'version', b'01')
405 405 cg = changegroup.getunbundler(version, part, b'UN')
406 406 if not ui.quiet:
407 407 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
408 408 if part.type == b'obsmarkers':
409 409 if not ui.quiet:
410 410 _debugobsmarkers(ui, part, indent=4, **opts)
411 411 if part.type == b'phase-heads':
412 412 if not ui.quiet:
413 413 _debugphaseheads(ui, part, indent=4)
414 414
415 415
416 416 @command(
417 417 b'debugbundle',
418 418 [
419 419 (b'a', b'all', None, _(b'show all details')),
420 420 (b'', b'part-type', [], _(b'show only the named part type')),
421 421 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
422 422 ],
423 423 _(b'FILE'),
424 424 norepo=True,
425 425 )
426 426 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
427 427 """lists the contents of a bundle"""
428 428 with hg.openpath(ui, bundlepath) as f:
429 429 if spec:
430 430 spec = exchange.getbundlespec(ui, f)
431 431 ui.write(b'%s\n' % spec)
432 432 return
433 433
434 434 gen = exchange.readbundle(ui, f, bundlepath)
435 435 if isinstance(gen, bundle2.unbundle20):
436 436 return _debugbundle2(ui, gen, all=all, **opts)
437 437 _debugchangegroup(ui, gen, all=all, **opts)
438 438
439 439
440 440 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
441 441 def debugcapabilities(ui, path, **opts):
442 442 """lists the capabilities of a remote peer"""
443 443 opts = pycompat.byteskwargs(opts)
444 444 peer = hg.peer(ui, opts, path)
445 445 caps = peer.capabilities()
446 446 ui.writenoi18n(b'Main capabilities:\n')
447 447 for c in sorted(caps):
448 448 ui.write(b' %s\n' % c)
449 449 b2caps = bundle2.bundle2caps(peer)
450 450 if b2caps:
451 451 ui.writenoi18n(b'Bundle2 capabilities:\n')
452 452 for key, values in sorted(pycompat.iteritems(b2caps)):
453 453 ui.write(b' %s\n' % key)
454 454 for v in values:
455 455 ui.write(b' %s\n' % v)
456 456
457 457
458 458 @command(b'debugcheckstate', [], b'')
459 459 def debugcheckstate(ui, repo):
460 460 """validate the correctness of the current dirstate"""
461 461 parent1, parent2 = repo.dirstate.parents()
462 462 m1 = repo[parent1].manifest()
463 463 m2 = repo[parent2].manifest()
464 464 errors = 0
465 465 for f in repo.dirstate:
466 466 state = repo.dirstate[f]
467 467 if state in b"nr" and f not in m1:
468 468 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
469 469 errors += 1
470 470 if state in b"a" and f in m1:
471 471 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
472 472 errors += 1
473 473 if state in b"m" and f not in m1 and f not in m2:
474 474 ui.warn(
475 475 _(b"%s in state %s, but not in either manifest\n") % (f, state)
476 476 )
477 477 errors += 1
478 478 for f in m1:
479 479 state = repo.dirstate[f]
480 480 if state not in b"nrm":
481 481 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
482 482 errors += 1
483 483 if errors:
484 error = _(b".hg/dirstate inconsistent with current parent's manifest")
485 raise error.Abort(error)
484 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
485 raise error.Abort(errstr)
486 486
487 487
488 488 @command(
489 489 b'debugcolor',
490 490 [(b'', b'style', None, _(b'show all configured styles'))],
491 491 b'hg debugcolor',
492 492 )
493 493 def debugcolor(ui, repo, **opts):
494 494 """show available color, effects or style"""
495 495 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
496 496 if opts.get('style'):
497 497 return _debugdisplaystyle(ui)
498 498 else:
499 499 return _debugdisplaycolor(ui)
500 500
501 501
502 502 def _debugdisplaycolor(ui):
503 503 ui = ui.copy()
504 504 ui._styles.clear()
505 505 for effect in color._activeeffects(ui).keys():
506 506 ui._styles[effect] = effect
507 507 if ui._terminfoparams:
508 508 for k, v in ui.configitems(b'color'):
509 509 if k.startswith(b'color.'):
510 510 ui._styles[k] = k[6:]
511 511 elif k.startswith(b'terminfo.'):
512 512 ui._styles[k] = k[9:]
513 513 ui.write(_(b'available colors:\n'))
514 514 # sort labels with a '_' after the others to group the '_background' entries.
515 515 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
516 516 for colorname, label in items:
517 517 ui.write(b'%s\n' % colorname, label=label)
518 518
519 519
520 520 def _debugdisplaystyle(ui):
521 521 ui.write(_(b'available style:\n'))
522 522 if not ui._styles:
523 523 return
524 524 width = max(len(s) for s in ui._styles)
525 525 for label, effects in sorted(ui._styles.items()):
526 526 ui.write(b'%s' % label, label=label)
527 527 if effects:
528 528 # 50
529 529 ui.write(b': ')
530 530 ui.write(b' ' * (max(0, width - len(label))))
531 531 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
532 532 ui.write(b'\n')
533 533
534 534
535 535 @command(b'debugcreatestreamclonebundle', [], b'FILE')
536 536 def debugcreatestreamclonebundle(ui, repo, fname):
537 537 """create a stream clone bundle file
538 538
539 539 Stream bundles are special bundles that are essentially archives of
540 540 revlog files. They are commonly used for cloning very quickly.
541 541 """
542 542 # TODO we may want to turn this into an abort when this functionality
543 543 # is moved into `hg bundle`.
544 544 if phases.hassecret(repo):
545 545 ui.warn(
546 546 _(
547 547 b'(warning: stream clone bundle will contain secret '
548 548 b'revisions)\n'
549 549 )
550 550 )
551 551
552 552 requirements, gen = streamclone.generatebundlev1(repo)
553 553 changegroup.writechunks(ui, gen, fname)
554 554
555 555 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
556 556
557 557
558 558 @command(
559 559 b'debugdag',
560 560 [
561 561 (b't', b'tags', None, _(b'use tags as labels')),
562 562 (b'b', b'branches', None, _(b'annotate with branch names')),
563 563 (b'', b'dots', None, _(b'use dots for runs')),
564 564 (b's', b'spaces', None, _(b'separate elements by spaces')),
565 565 ],
566 566 _(b'[OPTION]... [FILE [REV]...]'),
567 567 optionalrepo=True,
568 568 )
569 569 def debugdag(ui, repo, file_=None, *revs, **opts):
570 570 """format the changelog or an index DAG as a concise textual description
571 571
572 572 If you pass a revlog index, the revlog's DAG is emitted. If you list
573 573 revision numbers, they get labeled in the output as rN.
574 574
575 575 Otherwise, the changelog DAG of the current repo is emitted.
576 576 """
577 577 spaces = opts.get('spaces')
578 578 dots = opts.get('dots')
579 579 if file_:
580 580 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
581 581 revs = set((int(r) for r in revs))
582 582
583 583 def events():
584 584 for r in rlog:
585 585 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
586 586 if r in revs:
587 587 yield b'l', (r, b"r%i" % r)
588 588
589 589 elif repo:
590 590 cl = repo.changelog
591 591 tags = opts.get('tags')
592 592 branches = opts.get('branches')
593 593 if tags:
594 594 labels = {}
595 595 for l, n in repo.tags().items():
596 596 labels.setdefault(cl.rev(n), []).append(l)
597 597
598 598 def events():
599 599 b = b"default"
600 600 for r in cl:
601 601 if branches:
602 602 newb = cl.read(cl.node(r))[5][b'branch']
603 603 if newb != b:
604 604 yield b'a', newb
605 605 b = newb
606 606 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
607 607 if tags:
608 608 ls = labels.get(r)
609 609 if ls:
610 610 for l in ls:
611 611 yield b'l', (r, l)
612 612
613 613 else:
614 614 raise error.Abort(_(b'need repo for changelog dag'))
615 615
616 616 for line in dagparser.dagtextlines(
617 617 events(),
618 618 addspaces=spaces,
619 619 wraplabels=True,
620 620 wrapannotations=True,
621 621 wrapnonlinear=dots,
622 622 usedots=dots,
623 623 maxlinewidth=70,
624 624 ):
625 625 ui.write(line)
626 626 ui.write(b"\n")
627 627
628 628
629 629 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
630 630 def debugdata(ui, repo, file_, rev=None, **opts):
631 631 """dump the contents of a data file revision"""
632 632 opts = pycompat.byteskwargs(opts)
633 633 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
634 634 if rev is not None:
635 635 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
636 636 file_, rev = None, file_
637 637 elif rev is None:
638 638 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
639 639 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
640 640 try:
641 641 ui.write(r.rawdata(r.lookup(rev)))
642 642 except KeyError:
643 643 raise error.Abort(_(b'invalid revision identifier %s') % rev)
644 644
645 645
646 646 @command(
647 647 b'debugdate',
648 648 [(b'e', b'extended', None, _(b'try extended date formats'))],
649 649 _(b'[-e] DATE [RANGE]'),
650 650 norepo=True,
651 651 optionalrepo=True,
652 652 )
653 653 def debugdate(ui, date, range=None, **opts):
654 654 """parse and display a date"""
655 655 if opts["extended"]:
656 656 d = dateutil.parsedate(date, util.extendeddateformats)
657 657 else:
658 658 d = dateutil.parsedate(date)
659 659 ui.writenoi18n(b"internal: %d %d\n" % d)
660 660 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
661 661 if range:
662 662 m = dateutil.matchdate(range)
663 663 ui.writenoi18n(b"match: %s\n" % m(d[0]))
664 664
665 665
666 666 @command(
667 667 b'debugdeltachain',
668 668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
669 669 _(b'-c|-m|FILE'),
670 670 optionalrepo=True,
671 671 )
672 672 def debugdeltachain(ui, repo, file_=None, **opts):
673 673 """dump information about delta chains in a revlog
674 674
675 675 Output can be templatized. Available template keywords are:
676 676
677 677 :``rev``: revision number
678 678 :``chainid``: delta chain identifier (numbered by unique base)
679 679 :``chainlen``: delta chain length to this revision
680 680 :``prevrev``: previous revision in delta chain
681 681 :``deltatype``: role of delta / how it was computed
682 682 :``compsize``: compressed size of revision
683 683 :``uncompsize``: uncompressed size of revision
684 684 :``chainsize``: total size of compressed revisions in chain
685 685 :``chainratio``: total chain size divided by uncompressed revision size
686 686 (new delta chains typically start at ratio 2.00)
687 687 :``lindist``: linear distance from base revision in delta chain to end
688 688 of this revision
689 689 :``extradist``: total size of revisions not part of this delta chain from
690 690 base of delta chain to end of this revision; a measurement
691 691 of how much extra data we need to read/seek across to read
692 692 the delta chain for this revision
693 693 :``extraratio``: extradist divided by chainsize; another representation of
694 694 how much unrelated data is needed to load this delta chain
695 695
696 696 If the repository is configured to use sparse reads, additional keywords
697 697 are available:
698 698
699 699 :``readsize``: total size of data read from the disk for a revision
700 700 (sum of the sizes of all the blocks)
701 701 :``largestblock``: size of the largest block of data read from the disk
702 702 :``readdensity``: density of useful bytes in the data read from the disk
703 703 :``srchunks``: in how many data hunks the whole revision would be read
704 704
705 705 The sparse read can be enabled with experimental.sparse-read = True
706 706 """
707 707 opts = pycompat.byteskwargs(opts)
708 708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
709 709 index = r.index
710 710 start = r.start
711 711 length = r.length
712 712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
713 713 withsparseread = getattr(r, '_withsparseread', False)
714 714
715 715 def revinfo(rev):
716 716 e = index[rev]
717 717 compsize = e[1]
718 718 uncompsize = e[2]
719 719 chainsize = 0
720 720
721 721 if generaldelta:
722 722 if e[3] == e[5]:
723 723 deltatype = b'p1'
724 724 elif e[3] == e[6]:
725 725 deltatype = b'p2'
726 726 elif e[3] == rev - 1:
727 727 deltatype = b'prev'
728 728 elif e[3] == rev:
729 729 deltatype = b'base'
730 730 else:
731 731 deltatype = b'other'
732 732 else:
733 733 if e[3] == rev:
734 734 deltatype = b'base'
735 735 else:
736 736 deltatype = b'prev'
737 737
738 738 chain = r._deltachain(rev)[0]
739 739 for iterrev in chain:
740 740 e = index[iterrev]
741 741 chainsize += e[1]
742 742
743 743 return compsize, uncompsize, deltatype, chain, chainsize
744 744
745 745 fm = ui.formatter(b'debugdeltachain', opts)
746 746
747 747 fm.plain(
748 748 b' rev chain# chainlen prev delta '
749 749 b'size rawsize chainsize ratio lindist extradist '
750 750 b'extraratio'
751 751 )
752 752 if withsparseread:
753 753 fm.plain(b' readsize largestblk rddensity srchunks')
754 754 fm.plain(b'\n')
755 755
756 756 chainbases = {}
757 757 for rev in r:
758 758 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
759 759 chainbase = chain[0]
760 760 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
761 761 basestart = start(chainbase)
762 762 revstart = start(rev)
763 763 lineardist = revstart + comp - basestart
764 764 extradist = lineardist - chainsize
765 765 try:
766 766 prevrev = chain[-2]
767 767 except IndexError:
768 768 prevrev = -1
769 769
770 770 if uncomp != 0:
771 771 chainratio = float(chainsize) / float(uncomp)
772 772 else:
773 773 chainratio = chainsize
774 774
775 775 if chainsize != 0:
776 776 extraratio = float(extradist) / float(chainsize)
777 777 else:
778 778 extraratio = extradist
779 779
780 780 fm.startitem()
781 781 fm.write(
782 782 b'rev chainid chainlen prevrev deltatype compsize '
783 783 b'uncompsize chainsize chainratio lindist extradist '
784 784 b'extraratio',
785 785 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
786 786 rev,
787 787 chainid,
788 788 len(chain),
789 789 prevrev,
790 790 deltatype,
791 791 comp,
792 792 uncomp,
793 793 chainsize,
794 794 chainratio,
795 795 lineardist,
796 796 extradist,
797 797 extraratio,
798 798 rev=rev,
799 799 chainid=chainid,
800 800 chainlen=len(chain),
801 801 prevrev=prevrev,
802 802 deltatype=deltatype,
803 803 compsize=comp,
804 804 uncompsize=uncomp,
805 805 chainsize=chainsize,
806 806 chainratio=chainratio,
807 807 lindist=lineardist,
808 808 extradist=extradist,
809 809 extraratio=extraratio,
810 810 )
811 811 if withsparseread:
812 812 readsize = 0
813 813 largestblock = 0
814 814 srchunks = 0
815 815
816 816 for revschunk in deltautil.slicechunk(r, chain):
817 817 srchunks += 1
818 818 blkend = start(revschunk[-1]) + length(revschunk[-1])
819 819 blksize = blkend - start(revschunk[0])
820 820
821 821 readsize += blksize
822 822 if largestblock < blksize:
823 823 largestblock = blksize
824 824
825 825 if readsize:
826 826 readdensity = float(chainsize) / float(readsize)
827 827 else:
828 828 readdensity = 1
829 829
830 830 fm.write(
831 831 b'readsize largestblock readdensity srchunks',
832 832 b' %10d %10d %9.5f %8d',
833 833 readsize,
834 834 largestblock,
835 835 readdensity,
836 836 srchunks,
837 837 readsize=readsize,
838 838 largestblock=largestblock,
839 839 readdensity=readdensity,
840 840 srchunks=srchunks,
841 841 )
842 842
843 843 fm.plain(b'\n')
844 844
845 845 fm.end()
846 846
847 847
848 848 @command(
849 849 b'debugdirstate|debugstate',
850 850 [
851 851 (
852 852 b'',
853 853 b'nodates',
854 854 None,
855 855 _(b'do not display the saved mtime (DEPRECATED)'),
856 856 ),
857 857 (b'', b'dates', True, _(b'display the saved mtime')),
858 858 (b'', b'datesort', None, _(b'sort by saved mtime')),
859 859 ],
860 860 _(b'[OPTION]...'),
861 861 )
862 862 def debugstate(ui, repo, **opts):
863 863 """show the contents of the current dirstate"""
864 864
865 865 nodates = not opts['dates']
866 866 if opts.get('nodates') is not None:
867 867 nodates = True
868 868 datesort = opts.get('datesort')
869 869
870 870 if datesort:
871 871 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
872 872 else:
873 873 keyfunc = None # sort by filename
874 874 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
875 875 if ent[3] == -1:
876 876 timestr = b'unset '
877 877 elif nodates:
878 878 timestr = b'set '
879 879 else:
880 880 timestr = time.strftime(
881 881 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
882 882 )
883 883 timestr = encoding.strtolocal(timestr)
884 884 if ent[1] & 0o20000:
885 885 mode = b'lnk'
886 886 else:
887 887 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
888 888 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
889 889 for f in repo.dirstate.copies():
890 890 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891 891
892 892
893 893 @command(
894 894 b'debugdiscovery',
895 895 [
896 896 (b'', b'old', None, _(b'use old-style discovery')),
897 897 (
898 898 b'',
899 899 b'nonheads',
900 900 None,
901 901 _(b'use old-style discovery with non-heads included'),
902 902 ),
903 903 (b'', b'rev', [], b'restrict discovery to this set of revs'),
904 904 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
905 905 ]
906 906 + cmdutil.remoteopts,
907 907 _(b'[--rev REV] [OTHER]'),
908 908 )
909 909 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
910 910 """runs the changeset discovery protocol in isolation"""
911 911 opts = pycompat.byteskwargs(opts)
912 912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
913 913 remote = hg.peer(repo, opts, remoteurl)
914 914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
915 915
916 916 # make sure tests are repeatable
917 917 random.seed(int(opts[b'seed']))
918 918
919 919 if opts.get(b'old'):
920 920
921 921 def doit(pushedrevs, remoteheads, remote=remote):
922 922 if not util.safehasattr(remote, b'branches'):
923 923 # enable in-client legacy support
924 924 remote = localrepo.locallegacypeer(remote.local())
925 925 common, _in, hds = treediscovery.findcommonincoming(
926 926 repo, remote, force=True
927 927 )
928 928 common = set(common)
929 929 if not opts.get(b'nonheads'):
930 930 ui.writenoi18n(
931 931 b"unpruned common: %s\n"
932 932 % b" ".join(sorted(short(n) for n in common))
933 933 )
934 934
935 935 clnode = repo.changelog.node
936 936 common = repo.revs(b'heads(::%ln)', common)
937 937 common = {clnode(r) for r in common}
938 938 return common, hds
939 939
940 940 else:
941 941
942 942 def doit(pushedrevs, remoteheads, remote=remote):
943 943 nodes = None
944 944 if pushedrevs:
945 945 revs = scmutil.revrange(repo, pushedrevs)
946 946 nodes = [repo[r].node() for r in revs]
947 947 common, any, hds = setdiscovery.findcommonheads(
948 948 ui, repo, remote, ancestorsof=nodes
949 949 )
950 950 return common, hds
951 951
952 952 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
953 953 localrevs = opts[b'rev']
954 954 with util.timedcm('debug-discovery') as t:
955 955 common, hds = doit(localrevs, remoterevs)
956 956
957 957 # compute all statistics
958 958 common = set(common)
959 959 rheads = set(hds)
960 960 lheads = set(repo.heads())
961 961
962 962 data = {}
963 963 data[b'elapsed'] = t.elapsed
964 964 data[b'nb-common'] = len(common)
965 965 data[b'nb-common-local'] = len(common & lheads)
966 966 data[b'nb-common-remote'] = len(common & rheads)
967 967 data[b'nb-common-both'] = len(common & rheads & lheads)
968 968 data[b'nb-local'] = len(lheads)
969 969 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
970 970 data[b'nb-remote'] = len(rheads)
971 971 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
972 972 data[b'nb-revs'] = len(repo.revs(b'all()'))
973 973 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
974 974 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
975 975
976 976 # display discovery summary
977 977 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
978 978 ui.writenoi18n(b"heads summary:\n")
979 979 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
980 980 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
981 981 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
982 982 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
983 983 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
984 984 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
985 985 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
986 986 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
987 987 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
988 988 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
989 989 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
990 990 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
991 991 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
992 992
993 993 if ui.verbose:
994 994 ui.writenoi18n(
995 995 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
996 996 )
997 997
998 998
999 999 _chunksize = 4 << 10
1000 1000
1001 1001
1002 1002 @command(
1003 1003 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1004 1004 )
1005 1005 def debugdownload(ui, repo, url, output=None, **opts):
1006 1006 """download a resource using Mercurial logic and config
1007 1007 """
1008 1008 fh = urlmod.open(ui, url, output)
1009 1009
1010 1010 dest = ui
1011 1011 if output:
1012 1012 dest = open(output, b"wb", _chunksize)
1013 1013 try:
1014 1014 data = fh.read(_chunksize)
1015 1015 while data:
1016 1016 dest.write(data)
1017 1017 data = fh.read(_chunksize)
1018 1018 finally:
1019 1019 if output:
1020 1020 dest.close()
1021 1021
1022 1022
1023 1023 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1024 1024 def debugextensions(ui, repo, **opts):
1025 1025 '''show information about active extensions'''
1026 1026 opts = pycompat.byteskwargs(opts)
1027 1027 exts = extensions.extensions(ui)
1028 1028 hgver = util.version()
1029 1029 fm = ui.formatter(b'debugextensions', opts)
1030 1030 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1031 1031 isinternal = extensions.ismoduleinternal(extmod)
1032 1032 extsource = pycompat.fsencode(extmod.__file__)
1033 1033 if isinternal:
1034 1034 exttestedwith = [] # never expose magic string to users
1035 1035 else:
1036 1036 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1037 1037 extbuglink = getattr(extmod, 'buglink', None)
1038 1038
1039 1039 fm.startitem()
1040 1040
1041 1041 if ui.quiet or ui.verbose:
1042 1042 fm.write(b'name', b'%s\n', extname)
1043 1043 else:
1044 1044 fm.write(b'name', b'%s', extname)
1045 1045 if isinternal or hgver in exttestedwith:
1046 1046 fm.plain(b'\n')
1047 1047 elif not exttestedwith:
1048 1048 fm.plain(_(b' (untested!)\n'))
1049 1049 else:
1050 1050 lasttestedversion = exttestedwith[-1]
1051 1051 fm.plain(b' (%s!)\n' % lasttestedversion)
1052 1052
1053 1053 fm.condwrite(
1054 1054 ui.verbose and extsource,
1055 1055 b'source',
1056 1056 _(b' location: %s\n'),
1057 1057 extsource or b"",
1058 1058 )
1059 1059
1060 1060 if ui.verbose:
1061 1061 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1062 1062 fm.data(bundled=isinternal)
1063 1063
1064 1064 fm.condwrite(
1065 1065 ui.verbose and exttestedwith,
1066 1066 b'testedwith',
1067 1067 _(b' tested with: %s\n'),
1068 1068 fm.formatlist(exttestedwith, name=b'ver'),
1069 1069 )
1070 1070
1071 1071 fm.condwrite(
1072 1072 ui.verbose and extbuglink,
1073 1073 b'buglink',
1074 1074 _(b' bug reporting: %s\n'),
1075 1075 extbuglink or b"",
1076 1076 )
1077 1077
1078 1078 fm.end()
1079 1079
1080 1080
1081 1081 @command(
1082 1082 b'debugfileset',
1083 1083 [
1084 1084 (
1085 1085 b'r',
1086 1086 b'rev',
1087 1087 b'',
1088 1088 _(b'apply the filespec on this revision'),
1089 1089 _(b'REV'),
1090 1090 ),
1091 1091 (
1092 1092 b'',
1093 1093 b'all-files',
1094 1094 False,
1095 1095 _(b'test files from all revisions and working directory'),
1096 1096 ),
1097 1097 (
1098 1098 b's',
1099 1099 b'show-matcher',
1100 1100 None,
1101 1101 _(b'print internal representation of matcher'),
1102 1102 ),
1103 1103 (
1104 1104 b'p',
1105 1105 b'show-stage',
1106 1106 [],
1107 1107 _(b'print parsed tree at the given stage'),
1108 1108 _(b'NAME'),
1109 1109 ),
1110 1110 ],
1111 1111 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1112 1112 )
1113 1113 def debugfileset(ui, repo, expr, **opts):
1114 1114 '''parse and apply a fileset specification'''
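# Illustrative usage (editor's note; fileset syntax as described in
# 'hg help filesets', predicate names assumed):
#   hg debugfileset -r . '*.py and not binary()'
#   hg debugfileset --show-stage all 'added()'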
1115 1115 from . import fileset
1116 1116
1117 1117 fileset.symbols # force import of fileset so we have predicates to optimize
1118 1118 opts = pycompat.byteskwargs(opts)
1119 1119 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1120 1120
1121 1121 stages = [
1122 1122 (b'parsed', pycompat.identity),
1123 1123 (b'analyzed', filesetlang.analyze),
1124 1124 (b'optimized', filesetlang.optimize),
1125 1125 ]
1126 1126 stagenames = set(n for n, f in stages)
1127 1127
1128 1128 showalways = set()
1129 1129 if ui.verbose and not opts[b'show_stage']:
1130 1130 # show parsed tree by --verbose (deprecated)
1131 1131 showalways.add(b'parsed')
1132 1132 if opts[b'show_stage'] == [b'all']:
1133 1133 showalways.update(stagenames)
1134 1134 else:
1135 1135 for n in opts[b'show_stage']:
1136 1136 if n not in stagenames:
1137 1137 raise error.Abort(_(b'invalid stage name: %s') % n)
1138 1138 showalways.update(opts[b'show_stage'])
1139 1139
1140 1140 tree = filesetlang.parse(expr)
1141 1141 for n, f in stages:
1142 1142 tree = f(tree)
1143 1143 if n in showalways:
1144 1144 if opts[b'show_stage'] or n != b'parsed':
1145 1145 ui.write(b"* %s:\n" % n)
1146 1146 ui.write(filesetlang.prettyformat(tree), b"\n")
1147 1147
1148 1148 files = set()
1149 1149 if opts[b'all_files']:
1150 1150 for r in repo:
1151 1151 c = repo[r]
1152 1152 files.update(c.files())
1153 1153 files.update(c.substate)
1154 1154 if opts[b'all_files'] or ctx.rev() is None:
1155 1155 wctx = repo[None]
1156 1156 files.update(
1157 1157 repo.dirstate.walk(
1158 1158 scmutil.matchall(repo),
1159 1159 subrepos=list(wctx.substate),
1160 1160 unknown=True,
1161 1161 ignored=True,
1162 1162 )
1163 1163 )
1164 1164 files.update(wctx.substate)
1165 1165 else:
1166 1166 files.update(ctx.files())
1167 1167 files.update(ctx.substate)
1168 1168
1169 1169 m = ctx.matchfileset(expr)
1170 1170 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1171 1171 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1172 1172 for f in sorted(files):
1173 1173 if not m(f):
1174 1174 continue
1175 1175 ui.write(b"%s\n" % f)
1176 1176
1177 1177
1178 1178 @command(b'debugformat', [] + cmdutil.formatteropts)
1179 1179 def debugformat(ui, repo, **opts):
1180 1180 """display format information about the current repository
1181 1181
1182 1182 Use --verbose to get extra information about current config value and
1183 1183 Mercurial default."""
1184 1184 opts = pycompat.byteskwargs(opts)
1185 1185 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1186 1186 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1187 1187
1188 1188 def makeformatname(name):
1189 1189 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1190 1190
1191 1191 fm = ui.formatter(b'debugformat', opts)
1192 1192 if fm.isplain():
1193 1193
1194 1194 def formatvalue(value):
1195 1195 if util.safehasattr(value, b'startswith'):
1196 1196 return value
1197 1197 if value:
1198 1198 return b'yes'
1199 1199 else:
1200 1200 return b'no'
1201 1201
1202 1202 else:
1203 1203 formatvalue = pycompat.identity
1204 1204
1205 1205 fm.plain(b'format-variant')
1206 1206 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1207 1207 fm.plain(b' repo')
1208 1208 if ui.verbose:
1209 1209 fm.plain(b' config default')
1210 1210 fm.plain(b'\n')
1211 1211 for fv in upgrade.allformatvariant:
1212 1212 fm.startitem()
1213 1213 repovalue = fv.fromrepo(repo)
1214 1214 configvalue = fv.fromconfig(repo)
1215 1215
1216 1216 if repovalue != configvalue:
1217 1217 namelabel = b'formatvariant.name.mismatchconfig'
1218 1218 repolabel = b'formatvariant.repo.mismatchconfig'
1219 1219 elif repovalue != fv.default:
1220 1220 namelabel = b'formatvariant.name.mismatchdefault'
1221 1221 repolabel = b'formatvariant.repo.mismatchdefault'
1222 1222 else:
1223 1223 namelabel = b'formatvariant.name.uptodate'
1224 1224 repolabel = b'formatvariant.repo.uptodate'
1225 1225
1226 1226 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1227 1227 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1228 1228 if fv.default != configvalue:
1229 1229 configlabel = b'formatvariant.config.special'
1230 1230 else:
1231 1231 configlabel = b'formatvariant.config.default'
1232 1232 fm.condwrite(
1233 1233 ui.verbose,
1234 1234 b'config',
1235 1235 b' %6s',
1236 1236 formatvalue(configvalue),
1237 1237 label=configlabel,
1238 1238 )
1239 1239 fm.condwrite(
1240 1240 ui.verbose,
1241 1241 b'default',
1242 1242 b' %7s',
1243 1243 formatvalue(fv.default),
1244 1244 label=b'formatvariant.default',
1245 1245 )
1246 1246 fm.plain(b'\n')
1247 1247 fm.end()
1248 1248
1249 1249
1250 1250 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1251 1251 def debugfsinfo(ui, path=b"."):
1252 1252 """show information detected about current filesystem"""
1253 1253 ui.writenoi18n(b'path: %s\n' % path)
1254 1254 ui.writenoi18n(
1255 1255 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1256 1256 )
1257 1257 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1258 1258 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1259 1259 ui.writenoi18n(
1260 1260 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1261 1261 )
1262 1262 ui.writenoi18n(
1263 1263 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1264 1264 )
1265 1265 casesensitive = b'(unknown)'
1266 1266 try:
1267 1267 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1268 1268 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1269 1269 except OSError:
1270 1270 pass
1271 1271 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1272 1272
1273 1273
1274 1274 @command(
1275 1275 b'debuggetbundle',
1276 1276 [
1277 1277 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1278 1278 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1279 1279 (
1280 1280 b't',
1281 1281 b'type',
1282 1282 b'bzip2',
1283 1283 _(b'bundle compression type to use'),
1284 1284 _(b'TYPE'),
1285 1285 ),
1286 1286 ],
1287 1287 _(b'REPO FILE [-H|-C ID]...'),
1288 1288 norepo=True,
1289 1289 )
1290 1290 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1291 1291 """retrieves a bundle from a repo
1292 1292
1293 1293 Every ID must be a full-length hex node id string. Saves the bundle to the
1294 1294 given file.
1295 1295 """
1296 1296 opts = pycompat.byteskwargs(opts)
1297 1297 repo = hg.peer(ui, opts, repopath)
1298 1298 if not repo.capable(b'getbundle'):
1299 1299 raise error.Abort(b"getbundle() not supported by target repository")
1300 1300 args = {}
1301 1301 if common:
1302 1302 args['common'] = [bin(s) for s in common]
1303 1303 if head:
1304 1304 args['heads'] = [bin(s) for s in head]
1305 1305 # TODO: get desired bundlecaps from command line.
1306 1306 args['bundlecaps'] = None
1307 1307 bundle = repo.getbundle(b'debug', **args)
1308 1308
1309 1309 bundletype = opts.get(b'type', b'bzip2').lower()
1310 1310 btypes = {
1311 1311 b'none': b'HG10UN',
1312 1312 b'bzip2': b'HG10BZ',
1313 1313 b'gzip': b'HG10GZ',
1314 1314 b'bundle2': b'HG20',
1315 1315 }
1316 1316 bundletype = btypes.get(bundletype)
1317 1317 if bundletype not in bundle2.bundletypes:
1318 1318 raise error.Abort(_(b'unknown bundle type specified with --type'))
1319 1319 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1320 1320
1321 1321
1322 1322 @command(b'debugignore', [], b'[FILE]')
1323 1323 def debugignore(ui, repo, *files, **opts):
1324 1324 """display the combined ignore pattern and information about ignored files
1325 1325
1326 1326 With no argument display the combined ignore pattern.
1327 1327
1328 1328 Given space separated file names, shows if the given file is ignored and
1329 1329 if so, show the ignore rule (file and line number) that matched it.
1330 1330 """
1331 1331 ignore = repo.dirstate._ignore
1332 1332 if not files:
1333 1333 # Show all the patterns
1334 1334 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1335 1335 else:
1336 1336 m = scmutil.match(repo[None], pats=files)
1337 1337 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1338 1338 for f in m.files():
1339 1339 nf = util.normpath(f)
1340 1340 ignored = None
1341 1341 ignoredata = None
1342 1342 if nf != b'.':
1343 1343 if ignore(nf):
1344 1344 ignored = nf
1345 1345 ignoredata = repo.dirstate._ignorefileandline(nf)
1346 1346 else:
1347 1347 for p in pathutil.finddirs(nf):
1348 1348 if ignore(p):
1349 1349 ignored = p
1350 1350 ignoredata = repo.dirstate._ignorefileandline(p)
1351 1351 break
1352 1352 if ignored:
1353 1353 if ignored == nf:
1354 1354 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1355 1355 else:
1356 1356 ui.write(
1357 1357 _(
1358 1358 b"%s is ignored because of "
1359 1359 b"containing directory %s\n"
1360 1360 )
1361 1361 % (uipathfn(f), ignored)
1362 1362 )
1363 1363 ignorefile, lineno, line = ignoredata
1364 1364 ui.write(
1365 1365 _(b"(ignore rule in %s, line %d: '%s')\n")
1366 1366 % (ignorefile, lineno, line)
1367 1367 )
1368 1368 else:
1369 1369 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1370 1370
1371 1371
1372 1372 @command(
1373 1373 b'debugindex',
1374 1374 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1375 1375 _(b'-c|-m|FILE'),
1376 1376 )
1377 1377 def debugindex(ui, repo, file_=None, **opts):
1378 1378 """dump index data for a storage primitive"""
1379 1379 opts = pycompat.byteskwargs(opts)
1380 1380 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1381 1381
1382 1382 if ui.debugflag:
1383 1383 shortfn = hex
1384 1384 else:
1385 1385 shortfn = short
1386 1386
1387 1387 idlen = 12
1388 1388 for i in store:
1389 1389 idlen = len(shortfn(store.node(i)))
1390 1390 break
1391 1391
1392 1392 fm = ui.formatter(b'debugindex', opts)
1393 1393 fm.plain(
1394 1394 b' rev linkrev %s %s p2\n'
1395 1395 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1396 1396 )
1397 1397
1398 1398 for rev in store:
1399 1399 node = store.node(rev)
1400 1400 parents = store.parents(node)
1401 1401
1402 1402 fm.startitem()
1403 1403 fm.write(b'rev', b'%6d ', rev)
1404 1404 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1405 1405 fm.write(b'node', b'%s ', shortfn(node))
1406 1406 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1407 1407 fm.write(b'p2', b'%s', shortfn(parents[1]))
1408 1408 fm.plain(b'\n')
1409 1409
1410 1410 fm.end()
1411 1411
1412 1412
1413 1413 @command(
1414 1414 b'debugindexdot',
1415 1415 cmdutil.debugrevlogopts,
1416 1416 _(b'-c|-m|FILE'),
1417 1417 optionalrepo=True,
1418 1418 )
1419 1419 def debugindexdot(ui, repo, file_=None, **opts):
1420 1420 """dump an index DAG as a graphviz dot file"""
1421 1421 opts = pycompat.byteskwargs(opts)
1422 1422 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1423 1423 ui.writenoi18n(b"digraph G {\n")
1424 1424 for i in r:
1425 1425 node = r.node(i)
1426 1426 pp = r.parents(node)
1427 1427 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1428 1428 if pp[1] != nullid:
1429 1429 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1430 1430 ui.write(b"}\n")
1431 1431
1432 1432
1433 1433 @command(b'debugindexstats', [])
1434 1434 def debugindexstats(ui, repo):
1435 1435 """show stats related to the changelog index"""
1436 1436 repo.changelog.shortest(nullid, 1)
1437 1437 index = repo.changelog.index
1438 1438 if not util.safehasattr(index, b'stats'):
1439 1439 raise error.Abort(_(b'debugindexstats only works with native code'))
1440 1440 for k, v in sorted(index.stats().items()):
1441 1441 ui.write(b'%s: %d\n' % (k, v))
1442 1442
1443 1443
1444 1444 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1445 1445 def debuginstall(ui, **opts):
1446 1446 '''test Mercurial installation
1447 1447
1448 1448 Returns 0 on success.
1449 1449 '''
1450 1450 opts = pycompat.byteskwargs(opts)
1451 1451
1452 1452 problems = 0
1453 1453
1454 1454 fm = ui.formatter(b'debuginstall', opts)
1455 1455 fm.startitem()
1456 1456
1457 1457 # encoding
1458 1458 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1459 1459 err = None
1460 1460 try:
1461 1461 codecs.lookup(pycompat.sysstr(encoding.encoding))
1462 1462 except LookupError as inst:
1463 1463 err = stringutil.forcebytestr(inst)
1464 1464 problems += 1
1465 1465 fm.condwrite(
1466 1466 err,
1467 1467 b'encodingerror',
1468 1468 _(b" %s\n (check that your locale is properly set)\n"),
1469 1469 err,
1470 1470 )
1471 1471
1472 1472 # Python
1473 1473 fm.write(
1474 1474 b'pythonexe',
1475 1475 _(b"checking Python executable (%s)\n"),
1476 1476 pycompat.sysexecutable or _(b"unknown"),
1477 1477 )
1478 1478 fm.write(
1479 1479 b'pythonver',
1480 1480 _(b"checking Python version (%s)\n"),
1481 1481 (b"%d.%d.%d" % sys.version_info[:3]),
1482 1482 )
1483 1483 fm.write(
1484 1484 b'pythonlib',
1485 1485 _(b"checking Python lib (%s)...\n"),
1486 1486 os.path.dirname(pycompat.fsencode(os.__file__)),
1487 1487 )
1488 1488
1489 1489 security = set(sslutil.supportedprotocols)
1490 1490 if sslutil.hassni:
1491 1491 security.add(b'sni')
1492 1492
1493 1493 fm.write(
1494 1494 b'pythonsecurity',
1495 1495 _(b"checking Python security support (%s)\n"),
1496 1496 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1497 1497 )
1498 1498
1499 1499 # These are warnings, not errors. So don't increment problem count. This
1500 1500 # may change in the future.
1501 1501 if b'tls1.2' not in security:
1502 1502 fm.plain(
1503 1503 _(
1504 1504 b' TLS 1.2 not supported by Python install; '
1505 1505 b'network connections lack modern security\n'
1506 1506 )
1507 1507 )
1508 1508 if b'sni' not in security:
1509 1509 fm.plain(
1510 1510 _(
1511 1511 b' SNI not supported by Python install; may have '
1512 1512 b'connectivity issues with some servers\n'
1513 1513 )
1514 1514 )
1515 1515
1516 1516 # TODO print CA cert info
1517 1517
1518 1518 # hg version
1519 1519 hgver = util.version()
1520 1520 fm.write(
1521 1521 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1522 1522 )
1523 1523 fm.write(
1524 1524 b'hgverextra',
1525 1525 _(b"checking Mercurial custom build (%s)\n"),
1526 1526 b'+'.join(hgver.split(b'+')[1:]),
1527 1527 )
1528 1528
1529 1529 # compiled modules
1530 1530 fm.write(
1531 1531 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1532 1532 )
1533 1533 fm.write(
1534 1534 b'hgmodules',
1535 1535 _(b"checking installed modules (%s)...\n"),
1536 1536 os.path.dirname(pycompat.fsencode(__file__)),
1537 1537 )
1538 1538
1539 1539 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1540 1540 rustext = rustandc # for now, that's the only case
1541 1541 cext = policy.policy in (b'c', b'allow') or rustandc
1542 1542 nopure = cext or rustext
1543 1543 if nopure:
1544 1544 err = None
1545 1545 try:
1546 1546 if cext:
1547 1547 from .cext import (
1548 1548 base85,
1549 1549 bdiff,
1550 1550 mpatch,
1551 1551 osutil,
1552 1552 )
1553 1553
1554 1554 # quiet pyflakes
1555 1555 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1556 1556 if rustext:
1557 1557 from .rustext import (
1558 1558 ancestor,
1559 1559 dirstate,
1560 1560 )
1561 1561
1562 1562 dir(ancestor), dir(dirstate) # quiet pyflakes
1563 1563 except Exception as inst:
1564 1564 err = stringutil.forcebytestr(inst)
1565 1565 problems += 1
1566 1566 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1567 1567
1568 1568 compengines = util.compengines._engines.values()
1569 1569 fm.write(
1570 1570 b'compengines',
1571 1571 _(b'checking registered compression engines (%s)\n'),
1572 1572 fm.formatlist(
1573 1573 sorted(e.name() for e in compengines),
1574 1574 name=b'compengine',
1575 1575 fmt=b'%s',
1576 1576 sep=b', ',
1577 1577 ),
1578 1578 )
1579 1579 fm.write(
1580 1580 b'compenginesavail',
1581 1581 _(b'checking available compression engines (%s)\n'),
1582 1582 fm.formatlist(
1583 1583 sorted(e.name() for e in compengines if e.available()),
1584 1584 name=b'compengine',
1585 1585 fmt=b'%s',
1586 1586 sep=b', ',
1587 1587 ),
1588 1588 )
1589 1589 wirecompengines = compression.compengines.supportedwireengines(
1590 1590 compression.SERVERROLE
1591 1591 )
1592 1592 fm.write(
1593 1593 b'compenginesserver',
1594 1594 _(
1595 1595 b'checking available compression engines '
1596 1596 b'for wire protocol (%s)\n'
1597 1597 ),
1598 1598 fm.formatlist(
1599 1599 [e.name() for e in wirecompengines if e.wireprotosupport()],
1600 1600 name=b'compengine',
1601 1601 fmt=b'%s',
1602 1602 sep=b', ',
1603 1603 ),
1604 1604 )
1605 1605 re2 = b'missing'
1606 1606 if util._re2:
1607 1607 re2 = b'available'
1608 1608 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1609 1609 fm.data(re2=bool(util._re2))
1610 1610
1611 1611 # templates
1612 1612 p = templater.templatepaths()
1613 1613 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1614 1614 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1615 1615 if p:
1616 1616 m = templater.templatepath(b"map-cmdline.default")
1617 1617 if m:
1618 1618 # template found, check if it is working
1619 1619 err = None
1620 1620 try:
1621 1621 templater.templater.frommapfile(m)
1622 1622 except Exception as inst:
1623 1623 err = stringutil.forcebytestr(inst)
1624 1624 p = None
1625 1625 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1626 1626 else:
1627 1627 p = None
1628 1628 fm.condwrite(
1629 1629 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1630 1630 )
1631 1631 fm.condwrite(
1632 1632 not m,
1633 1633 b'defaulttemplatenotfound',
1634 1634 _(b" template '%s' not found\n"),
1635 1635 b"default",
1636 1636 )
1637 1637 if not p:
1638 1638 problems += 1
1639 1639 fm.condwrite(
1640 1640 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1641 1641 )
1642 1642
1643 1643 # editor
1644 1644 editor = ui.geteditor()
1645 1645 editor = util.expandpath(editor)
1646 1646 editorbin = procutil.shellsplit(editor)[0]
1647 1647 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1648 1648 cmdpath = procutil.findexe(editorbin)
1649 1649 fm.condwrite(
1650 1650 not cmdpath and editor == b'vi',
1651 1651 b'vinotfound',
1652 1652 _(
1653 1653 b" No commit editor set and can't find %s in PATH\n"
1654 1654 b" (specify a commit editor in your configuration"
1655 1655 b" file)\n"
1656 1656 ),
1657 1657 not cmdpath and editor == b'vi' and editorbin,
1658 1658 )
1659 1659 fm.condwrite(
1660 1660 not cmdpath and editor != b'vi',
1661 1661 b'editornotfound',
1662 1662 _(
1663 1663 b" Can't find editor '%s' in PATH\n"
1664 1664 b" (specify a commit editor in your configuration"
1665 1665 b" file)\n"
1666 1666 ),
1667 1667 not cmdpath and editorbin,
1668 1668 )
1669 1669 if not cmdpath and editor != b'vi':
1670 1670 problems += 1
1671 1671
1672 1672 # check username
1673 1673 username = None
1674 1674 err = None
1675 1675 try:
1676 1676 username = ui.username()
1677 1677 except error.Abort as e:
1678 1678 err = stringutil.forcebytestr(e)
1679 1679 problems += 1
1680 1680
1681 1681 fm.condwrite(
1682 1682 username, b'username', _(b"checking username (%s)\n"), username
1683 1683 )
1684 1684 fm.condwrite(
1685 1685 err,
1686 1686 b'usernameerror',
1687 1687 _(
1688 1688 b"checking username...\n %s\n"
1689 1689 b" (specify a username in your configuration file)\n"
1690 1690 ),
1691 1691 err,
1692 1692 )
1693 1693
1694 1694 for name, mod in extensions.extensions():
1695 1695 handler = getattr(mod, 'debuginstall', None)
1696 1696 if handler is not None:
1697 1697 problems += handler(ui, fm)
1698 1698
1699 1699 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1700 1700 if not problems:
1701 1701 fm.data(problems=problems)
1702 1702 fm.condwrite(
1703 1703 problems,
1704 1704 b'problems',
1705 1705 _(b"%d problems detected, please check your install!\n"),
1706 1706 problems,
1707 1707 )
1708 1708 fm.end()
1709 1709
1710 1710 return problems
1711 1711
1712 1712
1713 1713 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1714 1714 def debugknown(ui, repopath, *ids, **opts):
1715 1715 """test whether node ids are known to a repo
1716 1716
1717 1717 Every ID must be a full-length hex node id string. Returns a list of 0s
1718 1718 and 1s indicating unknown/known.
1719 1719 """
1720 1720 opts = pycompat.byteskwargs(opts)
1721 1721 repo = hg.peer(ui, opts, repopath)
1722 1722 if not repo.capable(b'known'):
1723 1723 raise error.Abort(b"known() not supported by target repository")
1724 1724 flags = repo.known([bin(s) for s in ids])
1725 1725 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1726 1726
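# --- editor's illustrative sketch (not part of Mercurial) -----------------
# The "known" reply is a list of booleans, printed as one '0'/'1' character
# per queried node.  Stand-alone version of that conversion (the _demo_*
# name is made up):
def _demo_known_flags(flags=(True, False, True)):
    return b"".join([f and b"1" or b"0" for f in flags])
# _demo_known_flags() == b'101'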
1727 1727
1728 1728 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1729 1729 def debuglabelcomplete(ui, repo, *args):
1730 1730 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1731 1731 debugnamecomplete(ui, repo, *args)
1732 1732
1733 1733
1734 1734 @command(
1735 1735 b'debuglocks',
1736 1736 [
1737 1737 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1738 1738 (
1739 1739 b'W',
1740 1740 b'force-wlock',
1741 1741 None,
1742 1742 _(b'free the working state lock (DANGEROUS)'),
1743 1743 ),
1744 1744 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1745 1745 (
1746 1746 b'S',
1747 1747 b'set-wlock',
1748 1748 None,
1749 1749 _(b'set the working state lock until stopped'),
1750 1750 ),
1751 1751 ],
1752 1752 _(b'[OPTION]...'),
1753 1753 )
1754 1754 def debuglocks(ui, repo, **opts):
1755 1755 """show or modify state of locks
1756 1756
1757 1757 By default, this command will show which locks are held. This
1758 1758 includes the user and process holding the lock, the amount of time
1759 1759 the lock has been held, and the machine name where the process is
1760 1760 running if it's not local.
1761 1761
1762 1762 Locks protect the integrity of Mercurial's data, so they should be
1763 1763 treated with care. System crashes or other interruptions may cause
1764 1764 locks to not be properly released, though Mercurial will usually
1765 1765 detect and remove such stale locks automatically.
1766 1766
1767 1767 However, detecting stale locks may not always be possible (for
1768 1768 instance, on a shared filesystem). Removing locks may also be
1769 1769 blocked by filesystem permissions.
1770 1770
1771 1771 Setting a lock will prevent other commands from changing the data.
1772 1772 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1773 1773 The set locks are removed when the command exits.
1774 1774
1775 1775 Returns 0 if no locks are held.
1776 1776
1777 1777 """
1778 1778
1779 1779 if opts.get('force_lock'):
1780 1780 repo.svfs.unlink(b'lock')
1781 1781 if opts.get('force_wlock'):
1782 1782 repo.vfs.unlink(b'wlock')
1783 1783 if opts.get('force_lock') or opts.get('force_wlock'):
1784 1784 return 0
1785 1785
1786 1786 locks = []
1787 1787 try:
1788 1788 if opts.get('set_wlock'):
1789 1789 try:
1790 1790 locks.append(repo.wlock(False))
1791 1791 except error.LockHeld:
1792 1792 raise error.Abort(_(b'wlock is already held'))
1793 1793 if opts.get('set_lock'):
1794 1794 try:
1795 1795 locks.append(repo.lock(False))
1796 1796 except error.LockHeld:
1797 1797 raise error.Abort(_(b'lock is already held'))
1798 1798 if len(locks):
1799 1799 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1800 1800 return 0
1801 1801 finally:
1802 1802 release(*locks)
1803 1803
1804 1804 now = time.time()
1805 1805 held = 0
1806 1806
1807 1807 def report(vfs, name, method):
1808 1808 # this causes stale locks to get reaped for more accurate reporting
1809 1809 try:
1810 1810 l = method(False)
1811 1811 except error.LockHeld:
1812 1812 l = None
1813 1813
1814 1814 if l:
1815 1815 l.release()
1816 1816 else:
1817 1817 try:
1818 1818 st = vfs.lstat(name)
1819 1819 age = now - st[stat.ST_MTIME]
1820 1820 user = util.username(st.st_uid)
1821 1821 locker = vfs.readlock(name)
1822 1822 if b":" in locker:
1823 1823 host, pid = locker.split(b':')
1824 1824 if host == socket.gethostname():
1825 1825 locker = b'user %s, process %s' % (user or b'None', pid)
1826 1826 else:
1827 1827 locker = b'user %s, process %s, host %s' % (
1828 1828 user or b'None',
1829 1829 pid,
1830 1830 host,
1831 1831 )
1832 1832 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1833 1833 return 1
1834 1834 except OSError as e:
1835 1835 if e.errno != errno.ENOENT:
1836 1836 raise
1837 1837
1838 1838 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1839 1839 return 0
1840 1840
1841 1841 held += report(repo.svfs, b"lock", repo.lock)
1842 1842 held += report(repo.vfs, b"wlock", repo.wlock)
1843 1843
1844 1844 return held
1845 1845
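# --- editor's illustrative sketch (not part of Mercurial) -----------------
# A lock file records its holder as "host:pid".  The report above shows only
# user and pid when the host is the local machine, and adds the host name
# otherwise.  Simplified stand-alone model (made-up _demo_* name):
def _demo_describe_locker(locker=b'example-host:12345', user=b'alice'):
    import socket

    host, pid = locker.split(b':', 1)
    localhost = socket.gethostname().encode('utf-8', 'replace')
    if host == localhost:
        return b'user %s, process %s' % (user, pid)
    return b'user %s, process %s, host %s' % (user, pid, host)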
1846 1846
1847 1847 @command(
1848 1848 b'debugmanifestfulltextcache',
1849 1849 [
1850 1850 (b'', b'clear', False, _(b'clear the cache')),
1851 1851 (
1852 1852 b'a',
1853 1853 b'add',
1854 1854 [],
1855 1855 _(b'add the given manifest nodes to the cache'),
1856 1856 _(b'NODE'),
1857 1857 ),
1858 1858 ],
1859 1859 b'',
1860 1860 )
1861 1861 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1862 1862 """show, clear or amend the contents of the manifest fulltext cache"""
1863 1863
1864 1864 def getcache():
1865 1865 r = repo.manifestlog.getstorage(b'')
1866 1866 try:
1867 1867 return r._fulltextcache
1868 1868 except AttributeError:
1869 1869 msg = _(
1870 1870 b"Current revlog implementation doesn't appear to have a "
1871 1871 b"manifest fulltext cache\n"
1872 1872 )
1873 1873 raise error.Abort(msg)
1874 1874
1875 1875 if opts.get('clear'):
1876 1876 with repo.wlock():
1877 1877 cache = getcache()
1878 1878 cache.clear(clear_persisted_data=True)
1879 1879 return
1880 1880
1881 1881 if add:
1882 1882 with repo.wlock():
1883 1883 m = repo.manifestlog
1884 1884 store = m.getstorage(b'')
1885 1885 for n in add:
1886 1886 try:
1887 1887 manifest = m[store.lookup(n)]
1888 1888 except error.LookupError as e:
1889 1889 raise error.Abort(e, hint=b"Check your manifest node id")
1890 1890 manifest.read() # stores revision in cache too
1891 1891 return
1892 1892
1893 1893 cache = getcache()
1894 1894 if not len(cache):
1895 1895 ui.write(_(b'cache empty\n'))
1896 1896 else:
1897 1897 ui.write(
1898 1898 _(
1899 1899 b'cache contains %d manifest entries, in order of most to '
1900 1900 b'least recent:\n'
1901 1901 )
1902 1902 % (len(cache),)
1903 1903 )
1904 1904 totalsize = 0
1905 1905 for nodeid in cache:
1906 1906 # Use cache.peek so the LRU order is not updated
1907 1907 data = cache.peek(nodeid)
1908 1908 size = len(data)
1909 1909 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1910 1910 ui.write(
1911 1911 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1912 1912 )
1913 1913 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1914 1914 ui.write(
1915 1915 _(b'total cache data size %s, on-disk %s\n')
1916 1916 % (util.bytecount(totalsize), util.bytecount(ondisk))
1917 1917 )
1918 1918
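# --- editor's illustrative sketch (not part of Mercurial) -----------------
# Each cache entry is accounted as its fulltext length plus a fixed 24-byte
# overhead (20-byte nodeid + 4-byte size field); the totals printed above
# add these up.  Stand-alone arithmetic (made-up _demo_* name):
def _demo_cache_accounting(entries=((b'\x00' * 20, b'a' * 100),
                                    (b'\x01' * 20, b'b' * 50))):
    total = 0
    for nodeid, data in entries:
        total += len(data) + 24  # 20 bytes nodeid, 4 bytes size
    return total
# _demo_cache_accounting() == 198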
1919 1919
1920 1920 @command(b'debugmergestate', [], b'')
1921 1921 def debugmergestate(ui, repo, *args):
1922 1922 """print merge state
1923 1923
1924 1924 Use --verbose to print out information about whether v1 or v2 merge state
1925 1925 was chosen."""
1926 1926
1927 1927 def _hashornull(h):
1928 1928 if h == nullhex:
1929 1929 return b'null'
1930 1930 else:
1931 1931 return h
1932 1932
1933 1933 def printrecords(version):
1934 1934 ui.writenoi18n(b'* version %d records\n' % version)
1935 1935 if version == 1:
1936 1936 records = v1records
1937 1937 else:
1938 1938 records = v2records
1939 1939
1940 1940 for rtype, record in records:
1941 1941 # pretty print some record types
1942 1942 if rtype == b'L':
1943 1943 ui.writenoi18n(b'local: %s\n' % record)
1944 1944 elif rtype == b'O':
1945 1945 ui.writenoi18n(b'other: %s\n' % record)
1946 1946 elif rtype == b'm':
1947 1947 driver, mdstate = record.split(b'\0', 1)
1948 1948 ui.writenoi18n(
1949 1949 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1950 1950 )
1951 1951 elif rtype in b'FDC':
1952 1952 r = record.split(b'\0')
1953 1953 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1954 1954 if version == 1:
1955 1955 onode = b'not stored in v1 format'
1956 1956 flags = r[7]
1957 1957 else:
1958 1958 onode, flags = r[7:9]
1959 1959 ui.writenoi18n(
1960 1960 b'file: %s (record type "%s", state "%s", hash %s)\n'
1961 1961 % (f, rtype, state, _hashornull(hash))
1962 1962 )
1963 1963 ui.writenoi18n(
1964 1964 b' local path: %s (flags "%s")\n' % (lfile, flags)
1965 1965 )
1966 1966 ui.writenoi18n(
1967 1967 b' ancestor path: %s (node %s)\n'
1968 1968 % (afile, _hashornull(anode))
1969 1969 )
1970 1970 ui.writenoi18n(
1971 1971 b' other path: %s (node %s)\n'
1972 1972 % (ofile, _hashornull(onode))
1973 1973 )
1974 1974 elif rtype == b'f':
1975 1975 filename, rawextras = record.split(b'\0', 1)
1976 1976 extras = rawextras.split(b'\0')
1977 1977 i = 0
1978 1978 extrastrings = []
1979 1979 while i < len(extras):
1980 1980 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1981 1981 i += 2
1982 1982
1983 1983 ui.writenoi18n(
1984 1984 b'file extras: %s (%s)\n'
1985 1985 % (filename, b', '.join(extrastrings))
1986 1986 )
1987 1987 elif rtype == b'l':
1988 1988 labels = record.split(b'\0', 2)
1989 1989 labels = [l for l in labels if len(l) > 0]
1990 1990 ui.writenoi18n(b'labels:\n')
1991 1991 ui.write((b' local: %s\n' % labels[0]))
1992 1992 ui.write((b' other: %s\n' % labels[1]))
1993 1993 if len(labels) > 2:
1994 1994 ui.write((b' base: %s\n' % labels[2]))
1995 1995 else:
1996 1996 ui.writenoi18n(
1997 1997 b'unrecognized entry: %s\t%s\n'
1998 1998 % (rtype, record.replace(b'\0', b'\t'))
1999 1999 )
2000 2000
2001 2001 # Avoid mergestate.read() since it may raise an exception for unsupported
2002 2002 # merge state records. We shouldn't be doing this, but this is OK since this
2003 2003 # command is pretty low-level.
2004 2004 ms = mergemod.mergestate(repo)
2005 2005
2006 2006 # sort so that reasonable information is on top
2007 2007 v1records = ms._readrecordsv1()
2008 2008 v2records = ms._readrecordsv2()
2009 2009 order = b'LOml'
2010 2010
2011 2011 def key(r):
2012 2012 idx = order.find(r[0])
2013 2013 if idx == -1:
2014 2014 return (1, r[1])
2015 2015 else:
2016 2016 return (0, idx)
2017 2017
2018 2018 v1records.sort(key=key)
2019 2019 v2records.sort(key=key)
2020 2020
2021 2021 if not v1records and not v2records:
2022 2022 ui.writenoi18n(b'no merge state found\n')
2023 2023 elif not v2records:
2024 2024 ui.notenoi18n(b'no version 2 merge state\n')
2025 2025 printrecords(1)
2026 2026 elif ms._v1v2match(v1records, v2records):
2027 2027 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2028 2028 printrecords(2)
2029 2029 else:
2030 2030 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2031 2031 printrecords(1)
2032 2032 if ui.verbose:
2033 2033 printrecords(2)
2034 2034
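# --- editor's illustrative sketch (not part of Mercurial) -----------------
# Merge state records are sorted so that the well-known types come first in
# the fixed order 'L', 'O', 'm', 'l', with anything else after them ordered
# by payload.  The same key function on plain tuples (made-up _demo_* name):
def _demo_sort_records(records=((b'f', b'file extras'),
                                (b'O', b'other node'),
                                (b'L', b'local node'))):
    order = b'LOml'

    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        return (0, idx)

    return sorted(records, key=key)
# -> [(b'L', b'local node'), (b'O', b'other node'), (b'f', b'file extras')]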
2035 2035
2036 2036 @command(b'debugnamecomplete', [], _(b'NAME...'))
2037 2037 def debugnamecomplete(ui, repo, *args):
2038 2038 '''complete "names" - tags, open branch names, bookmark names'''
2039 2039
2040 2040 names = set()
2041 2041 # since we previously only listed open branches, we will handle that
2042 2042 # specially (after this for loop)
2043 2043 for name, ns in pycompat.iteritems(repo.names):
2044 2044 if name != b'branches':
2045 2045 names.update(ns.listnames(repo))
2046 2046 names.update(
2047 2047 tag
2048 2048 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2049 2049 if not closed
2050 2050 )
2051 2051 completions = set()
2052 2052 if not args:
2053 2053 args = [b'']
2054 2054 for a in args:
2055 2055 completions.update(n for n in names if n.startswith(a))
2056 2056 ui.write(b'\n'.join(sorted(completions)))
2057 2057 ui.write(b'\n')
2058 2058
2059 2059
2060 2060 @command(
2061 2061 b'debugobsolete',
2062 2062 [
2063 2063 (b'', b'flags', 0, _(b'markers flag')),
2064 2064 (
2065 2065 b'',
2066 2066 b'record-parents',
2067 2067 False,
2068 2068 _(b'record parent information for the precursor'),
2069 2069 ),
2070 2070 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2071 2071 (
2072 2072 b'',
2073 2073 b'exclusive',
2074 2074 False,
2075 2075 _(b'restrict display to markers only relevant to REV'),
2076 2076 ),
2077 2077 (b'', b'index', False, _(b'display index of the marker')),
2078 2078 (b'', b'delete', [], _(b'delete markers specified by indices')),
2079 2079 ]
2080 2080 + cmdutil.commitopts2
2081 2081 + cmdutil.formatteropts,
2082 2082 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2083 2083 )
2084 2084 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2085 2085 """create arbitrary obsolete marker
2086 2086
2087 2087 With no arguments, displays the list of obsolescence markers."""
2088 2088
2089 2089 opts = pycompat.byteskwargs(opts)
2090 2090
2091 2091 def parsenodeid(s):
2092 2092 try:
2093 2093 # We do not use revsingle/revrange functions here to accept
2094 2094 # arbitrary node identifiers, possibly not present in the
2095 2095 # local repository.
2096 2096 n = bin(s)
2097 2097 if len(n) != len(nullid):
2098 2098 raise TypeError()
2099 2099 return n
2100 2100 except TypeError:
2101 2101 raise error.Abort(
2102 2102 b'changeset references must be full hexadecimal '
2103 2103 b'node identifiers'
2104 2104 )
2105 2105
2106 2106 if opts.get(b'delete'):
2107 2107 indices = []
2108 2108 for v in opts.get(b'delete'):
2109 2109 try:
2110 2110 indices.append(int(v))
2111 2111 except ValueError:
2112 2112 raise error.Abort(
2113 2113 _(b'invalid index value: %r') % v,
2114 2114 hint=_(b'use integers for indices'),
2115 2115 )
2116 2116
2117 2117 if repo.currenttransaction():
2118 2118 raise error.Abort(
2119 2119 _(b'cannot delete obsmarkers in the middle of transaction.')
2120 2120 )
2121 2121
2122 2122 with repo.lock():
2123 2123 n = repair.deleteobsmarkers(repo.obsstore, indices)
2124 2124 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2125 2125
2126 2126 return
2127 2127
2128 2128 if precursor is not None:
2129 2129 if opts[b'rev']:
2130 2130 raise error.Abort(b'cannot select revision when creating marker')
2131 2131 metadata = {}
2132 2132 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2133 2133 succs = tuple(parsenodeid(succ) for succ in successors)
2134 2134 l = repo.lock()
2135 2135 try:
2136 2136 tr = repo.transaction(b'debugobsolete')
2137 2137 try:
2138 2138 date = opts.get(b'date')
2139 2139 if date:
2140 2140 date = dateutil.parsedate(date)
2141 2141 else:
2142 2142 date = None
2143 2143 prec = parsenodeid(precursor)
2144 2144 parents = None
2145 2145 if opts[b'record_parents']:
2146 2146 if prec not in repo.unfiltered():
2147 2147 raise error.Abort(
2148 2148 b'cannot use --record-parents on '
2149 2149 b'unknown changesets'
2150 2150 )
2151 2151 parents = repo.unfiltered()[prec].parents()
2152 2152 parents = tuple(p.node() for p in parents)
2153 2153 repo.obsstore.create(
2154 2154 tr,
2155 2155 prec,
2156 2156 succs,
2157 2157 opts[b'flags'],
2158 2158 parents=parents,
2159 2159 date=date,
2160 2160 metadata=metadata,
2161 2161 ui=ui,
2162 2162 )
2163 2163 tr.close()
2164 2164 except ValueError as exc:
2165 2165 raise error.Abort(
2166 2166 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2167 2167 )
2168 2168 finally:
2169 2169 tr.release()
2170 2170 finally:
2171 2171 l.release()
2172 2172 else:
2173 2173 if opts[b'rev']:
2174 2174 revs = scmutil.revrange(repo, opts[b'rev'])
2175 2175 nodes = [repo[r].node() for r in revs]
2176 2176 markers = list(
2177 2177 obsutil.getmarkers(
2178 2178 repo, nodes=nodes, exclusive=opts[b'exclusive']
2179 2179 )
2180 2180 )
2181 2181 markers.sort(key=lambda x: x._data)
2182 2182 else:
2183 2183 markers = obsutil.getmarkers(repo)
2184 2184
2185 2185 markerstoiter = markers
2186 2186 isrelevant = lambda m: True
2187 2187 if opts.get(b'rev') and opts.get(b'index'):
2188 2188 markerstoiter = obsutil.getmarkers(repo)
2189 2189 markerset = set(markers)
2190 2190 isrelevant = lambda m: m in markerset
2191 2191
2192 2192 fm = ui.formatter(b'debugobsolete', opts)
2193 2193 for i, m in enumerate(markerstoiter):
2194 2194 if not isrelevant(m):
2195 2195 # marker can be irrelevant when we're iterating over a set
2196 2196 # of markers (markerstoiter) which is bigger than the set
2197 2197 # of markers we want to display (markers).
2198 2198 # This can happen if both --index and --rev options are
2199 2199 # provided and thus we need to iterate over all of the markers
2200 2200 # to get the correct indices, but only display the ones that
2201 2201 # are relevant to --rev value
2202 2202 continue
2203 2203 fm.startitem()
2204 2204 ind = i if opts.get(b'index') else None
2205 2205 cmdutil.showmarker(fm, m, index=ind)
2206 2206 fm.end()
2207 2207
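# --- editor's illustrative sketch (not part of Mercurial) -----------------
# When --rev and --index are combined, the loop above walks *all* markers so
# the indices stay stable, and only prints the relevant subset.  The same
# pattern on plain data (made-up _demo_* name):
def _demo_stable_indices(allitems=(b'a', b'b', b'c', b'd'),
                         wanted=(b'b', b'd')):
    wantedset = set(wanted)
    return [(i, item) for i, item in enumerate(allitems) if item in wantedset]
# _demo_stable_indices() == [(1, b'b'), (3, b'd')]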
2208 2208
2209 2209 @command(
2210 2210 b'debugp1copies',
2211 2211 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2212 2212 _(b'[-r REV]'),
2213 2213 )
2214 2214 def debugp1copies(ui, repo, **opts):
2215 2215 """dump copy information compared to p1"""
2216 2216
2217 2217 opts = pycompat.byteskwargs(opts)
2218 2218 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2219 2219 for dst, src in ctx.p1copies().items():
2220 2220 ui.write(b'%s -> %s\n' % (src, dst))
2221 2221
2222 2222
2223 2223 @command(
2224 2224 b'debugp2copies',
2225 2225 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2226 2226 _(b'[-r REV]'),
2227 2227 )
2228 2228 def debugp2copies(ui, repo, **opts):
2229 2229 """dump copy information compared to p2"""
2230 2230
2231 2231 opts = pycompat.byteskwargs(opts)
2232 2232 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2233 2233 for dst, src in ctx.p2copies().items():
2234 2234 ui.write(b'%s -> %s\n' % (src, dst))
2235 2235
2236 2236
2237 2237 @command(
2238 2238 b'debugpathcomplete',
2239 2239 [
2240 2240 (b'f', b'full', None, _(b'complete an entire path')),
2241 2241 (b'n', b'normal', None, _(b'show only normal files')),
2242 2242 (b'a', b'added', None, _(b'show only added files')),
2243 2243 (b'r', b'removed', None, _(b'show only removed files')),
2244 2244 ],
2245 2245 _(b'FILESPEC...'),
2246 2246 )
2247 2247 def debugpathcomplete(ui, repo, *specs, **opts):
2248 2248 '''complete part or all of a tracked path
2249 2249
2250 2250 This command supports shells that offer path name completion. It
2251 2251 currently completes only files already known to the dirstate.
2252 2252
2253 2253 Completion extends only to the next path segment unless
2254 2254 --full is specified, in which case entire paths are used.'''
2255 2255
2256 2256 def complete(path, acceptable):
2257 2257 dirstate = repo.dirstate
2258 2258 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2259 2259 rootdir = repo.root + pycompat.ossep
2260 2260 if spec != repo.root and not spec.startswith(rootdir):
2261 2261 return [], []
2262 2262 if os.path.isdir(spec):
2263 2263 spec += b'/'
2264 2264 spec = spec[len(rootdir) :]
2265 2265 fixpaths = pycompat.ossep != b'/'
2266 2266 if fixpaths:
2267 2267 spec = spec.replace(pycompat.ossep, b'/')
2268 2268 speclen = len(spec)
2269 2269 fullpaths = opts['full']
2270 2270 files, dirs = set(), set()
2271 2271 adddir, addfile = dirs.add, files.add
2272 2272 for f, st in pycompat.iteritems(dirstate):
2273 2273 if f.startswith(spec) and st[0] in acceptable:
2274 2274 if fixpaths:
2275 2275 f = f.replace(b'/', pycompat.ossep)
2276 2276 if fullpaths:
2277 2277 addfile(f)
2278 2278 continue
2279 2279 s = f.find(pycompat.ossep, speclen)
2280 2280 if s >= 0:
2281 2281 adddir(f[:s])
2282 2282 else:
2283 2283 addfile(f)
2284 2284 return files, dirs
2285 2285
2286 2286 acceptable = b''
2287 2287 if opts['normal']:
2288 2288 acceptable += b'nm'
2289 2289 if opts['added']:
2290 2290 acceptable += b'a'
2291 2291 if opts['removed']:
2292 2292 acceptable += b'r'
2293 2293 cwd = repo.getcwd()
2294 2294 if not specs:
2295 2295 specs = [b'.']
2296 2296
2297 2297 files, dirs = set(), set()
2298 2298 for spec in specs:
2299 2299 f, d = complete(spec, acceptable or b'nmar')
2300 2300 files.update(f)
2301 2301 dirs.update(d)
2302 2302 files.update(dirs)
2303 2303 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2304 2304 ui.write(b'\n')
2305 2305
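# --- editor's illustrative sketch (not part of Mercurial) -----------------
# Without --full, completion stops at the next path separator after the
# typed prefix: matches that still contain a separator are reported as
# directories, the rest as files.  Simplified version of the loop above,
# hard-coding '/' as the separator (made-up _demo_* name):
def _demo_complete(spec=b'src/', tracked=(b'src/main.py', b'src/util/io.py')):
    files, dirs = set(), set()
    speclen = len(spec)
    for f in tracked:
        if not f.startswith(spec):
            continue
        s = f.find(b'/', speclen)
        if s >= 0:
            dirs.add(f[:s])  # complete only up to the next segment
        else:
            files.add(f)
    return sorted(files | dirs)
# _demo_complete() == [b'src/main.py', b'src/util']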
2306 2306
2307 2307 @command(
2308 2308 b'debugpathcopies',
2309 2309 cmdutil.walkopts,
2310 2310 b'hg debugpathcopies REV1 REV2 [FILE]',
2311 2311 inferrepo=True,
2312 2312 )
2313 2313 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2314 2314 """show copies between two revisions"""
2315 2315 ctx1 = scmutil.revsingle(repo, rev1)
2316 2316 ctx2 = scmutil.revsingle(repo, rev2)
2317 2317 m = scmutil.match(ctx1, pats, opts)
2318 2318 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2319 2319 ui.write(b'%s -> %s\n' % (src, dst))
2320 2320
2321 2321
2322 2322 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2323 2323 def debugpeer(ui, path):
2324 2324 """establish a connection to a peer repository"""
2325 2325 # Always enable peer request logging. Requires --debug to display
2326 2326 # though.
2327 2327 overrides = {
2328 2328 (b'devel', b'debug.peer-request'): True,
2329 2329 }
2330 2330
2331 2331 with ui.configoverride(overrides):
2332 2332 peer = hg.peer(ui, {}, path)
2333 2333
2334 2334 local = peer.local() is not None
2335 2335 canpush = peer.canpush()
2336 2336
2337 2337 ui.write(_(b'url: %s\n') % peer.url())
2338 2338 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2339 2339 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2340 2340
2341 2341
2342 2342 @command(
2343 2343 b'debugpickmergetool',
2344 2344 [
2345 2345 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2346 2346 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2347 2347 ]
2348 2348 + cmdutil.walkopts
2349 2349 + cmdutil.mergetoolopts,
2350 2350 _(b'[PATTERN]...'),
2351 2351 inferrepo=True,
2352 2352 )
2353 2353 def debugpickmergetool(ui, repo, *pats, **opts):
2354 2354 """examine which merge tool is chosen for specified file
2355 2355
2356 2356 As described in :hg:`help merge-tools`, Mercurial examines the
2357 2357 configurations below in this order to decide which merge tool is
2358 2358 chosen for the specified file.
2359 2359
2360 2360 1. ``--tool`` option
2361 2361 2. ``HGMERGE`` environment variable
2362 2362 3. configurations in ``merge-patterns`` section
2363 2363 4. configuration of ``ui.merge``
2364 2364 5. configurations in ``merge-tools`` section
2365 2365 6. ``hgmerge`` tool (for historical reasons only)
2366 2366 7. default tool for fallback (``:merge`` or ``:prompt``)
2367 2367
2368 2368 This command writes out the examination result in the style below::
2369 2369
2370 2370 FILE = MERGETOOL
2371 2371
2372 2372 By default, all files known in the first parent context of the
2373 2373 working directory are examined. Use file patterns and/or -I/-X
2374 2374 options to limit target files. -r/--rev is also useful to examine
2375 2375 files in another context without actually updating to it.
2376 2376
2377 2377 With --debug, this command also shows the warning messages emitted
2378 2378 while matching against ``merge-patterns`` and the like. It is
2379 2379 recommended to use this option with explicit file patterns and/or
2380 2380 -I/-X options, because it increases the amount of output per file
2381 2381 according to the configurations in hgrc.
2382 2382
2383 2383 With -v/--verbose, this command first shows the configurations
2384 2384 below (only those that are actually specified).
2385 2385
2386 2386 - ``--tool`` option
2387 2387 - ``HGMERGE`` environment variable
2388 2388 - configuration of ``ui.merge``
2389 2389
2390 2390 If a merge tool is chosen before matching against
2391 2391 ``merge-patterns``, this command can't show any helpful
2392 2392 information, even with --debug. In such a case, the information
2393 2393 above is useful for understanding why a merge tool was chosen.
2394 2394 """
2395 2395 opts = pycompat.byteskwargs(opts)
2396 2396 overrides = {}
2397 2397 if opts[b'tool']:
2398 2398 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2399 2399 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2400 2400
2401 2401 with ui.configoverride(overrides, b'debugmergepatterns'):
2402 2402 hgmerge = encoding.environ.get(b"HGMERGE")
2403 2403 if hgmerge is not None:
2404 2404 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2405 2405 uimerge = ui.config(b"ui", b"merge")
2406 2406 if uimerge:
2407 2407 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2408 2408
2409 2409 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2410 2410 m = scmutil.match(ctx, pats, opts)
2411 2411 changedelete = opts[b'changedelete']
2412 2412 for path in ctx.walk(m):
2413 2413 fctx = ctx[path]
2414 2414 try:
2415 2415 if not ui.debugflag:
2416 2416 ui.pushbuffer(error=True)
2417 2417 tool, toolpath = filemerge._picktool(
2418 2418 repo,
2419 2419 ui,
2420 2420 path,
2421 2421 fctx.isbinary(),
2422 2422 b'l' in fctx.flags(),
2423 2423 changedelete,
2424 2424 )
2425 2425 finally:
2426 2426 if not ui.debugflag:
2427 2427 ui.popbuffer()
2428 2428 ui.write(b'%s = %s\n' % (path, tool))
2429 2429
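# --- editor's illustrative sketch (not part of Mercurial) -----------------
# The precedence documented above boils down to "first configured source
# wins".  A stripped-down model that ignores merge-patterns, merge-tools
# sections and capability checks (made-up _demo_* name):
def _demo_tool_precedence(cli_tool=None, hgmerge_env=None, ui_merge=None):
    for source, value in ((b'--tool', cli_tool),
                          (b'HGMERGE', hgmerge_env),
                          (b'ui.merge', ui_merge)):
        if value:
            return source, value
    return b'fallback', b':merge'
# _demo_tool_precedence(ui_merge=b'vimdiff') == (b'ui.merge', b'vimdiff')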
2430 2430
2431 2431 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2432 2432 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2433 2433 '''access the pushkey key/value protocol
2434 2434
2435 2435 With two args, list the keys in the given namespace.
2436 2436
2437 2437 With five args, set a key to new if it currently is set to old.
2438 2438 Reports success or failure.
2439 2439 '''
2440 2440
2441 2441 target = hg.peer(ui, {}, repopath)
2442 2442 if keyinfo:
2443 2443 key, old, new = keyinfo
2444 2444 with target.commandexecutor() as e:
2445 2445 r = e.callcommand(
2446 2446 b'pushkey',
2447 2447 {
2448 2448 b'namespace': namespace,
2449 2449 b'key': key,
2450 2450 b'old': old,
2451 2451 b'new': new,
2452 2452 },
2453 2453 ).result()
2454 2454
2455 2455 ui.status(pycompat.bytestr(r) + b'\n')
2456 2456 return not r
2457 2457 else:
2458 2458 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2459 2459 ui.write(
2460 2460 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2461 2461 )
2462 2462
2463 2463
2464 2464 @command(b'debugpvec', [], _(b'A B'))
2465 2465 def debugpvec(ui, repo, a, b=None):
2466 2466 ca = scmutil.revsingle(repo, a)
2467 2467 cb = scmutil.revsingle(repo, b)
2468 2468 pa = pvec.ctxpvec(ca)
2469 2469 pb = pvec.ctxpvec(cb)
2470 2470 if pa == pb:
2471 2471 rel = b"="
2472 2472 elif pa > pb:
2473 2473 rel = b">"
2474 2474 elif pa < pb:
2475 2475 rel = b"<"
2476 2476 elif pa | pb:
2477 2477 rel = b"|"
2478 2478 ui.write(_(b"a: %s\n") % pa)
2479 2479 ui.write(_(b"b: %s\n") % pb)
2480 2480 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2481 2481 ui.write(
2482 2482 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2483 2483 % (
2484 2484 abs(pa._depth - pb._depth),
2485 2485 pvec._hamming(pa._vec, pb._vec),
2486 2486 pa.distance(pb),
2487 2487 rel,
2488 2488 )
2489 2489 )
2490 2490
2491 2491
2492 2492 @command(
2493 2493 b'debugrebuilddirstate|debugrebuildstate',
2494 2494 [
2495 2495 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2496 2496 (
2497 2497 b'',
2498 2498 b'minimal',
2499 2499 None,
2500 2500 _(
2501 2501 b'only rebuild files that are inconsistent with '
2502 2502 b'the working copy parent'
2503 2503 ),
2504 2504 ),
2505 2505 ],
2506 2506 _(b'[-r REV]'),
2507 2507 )
2508 2508 def debugrebuilddirstate(ui, repo, rev, **opts):
2509 2509 """rebuild the dirstate as it would look like for the given revision
2510 2510
2511 2511 If no revision is specified the first current parent will be used.
2512 2512
2513 2513 The dirstate will be set to the files of the given revision.
2514 2514 The actual working directory content or existing dirstate
2515 2515 information such as adds or removes is not considered.
2516 2516
2517 2517 ``minimal`` will only rebuild the dirstate status for files that claim to be
2518 2518 tracked but are not in the parent manifest, or that exist in the parent
2519 2519 manifest but are not in the dirstate. It will not change adds, removes, or
2520 2520 modified files that are in the working copy parent.
2521 2521
2522 2522 One use of this command is to make the next :hg:`status` invocation
2523 2523 check the actual file content.
2524 2524 """
2525 2525 ctx = scmutil.revsingle(repo, rev)
2526 2526 with repo.wlock():
2527 2527 dirstate = repo.dirstate
2528 2528 changedfiles = None
2529 2529 # See command doc for what minimal does.
2530 2530 if opts.get('minimal'):
2531 2531 manifestfiles = set(ctx.manifest().keys())
2532 2532 dirstatefiles = set(dirstate)
2533 2533 manifestonly = manifestfiles - dirstatefiles
2534 2534 dsonly = dirstatefiles - manifestfiles
2535 2535 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2536 2536 changedfiles = manifestonly | dsnotadded
2537 2537
2538 2538 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2539 2539
2540 2540
2541 2541 @command(b'debugrebuildfncache', [], b'')
2542 2542 def debugrebuildfncache(ui, repo):
2543 2543 """rebuild the fncache file"""
2544 2544 repair.rebuildfncache(ui, repo)
2545 2545
2546 2546
2547 2547 @command(
2548 2548 b'debugrename',
2549 2549 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2550 2550 _(b'[-r REV] [FILE]...'),
2551 2551 )
2552 2552 def debugrename(ui, repo, *pats, **opts):
2553 2553 """dump rename information"""
2554 2554
2555 2555 opts = pycompat.byteskwargs(opts)
2556 2556 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2557 2557 m = scmutil.match(ctx, pats, opts)
2558 2558 for abs in ctx.walk(m):
2559 2559 fctx = ctx[abs]
2560 2560 o = fctx.filelog().renamed(fctx.filenode())
2561 2561 rel = repo.pathto(abs)
2562 2562 if o:
2563 2563 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2564 2564 else:
2565 2565 ui.write(_(b"%s not renamed\n") % rel)
2566 2566
2567 2567
2568 2568 @command(
2569 2569 b'debugrevlog',
2570 2570 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2571 2571 _(b'-c|-m|FILE'),
2572 2572 optionalrepo=True,
2573 2573 )
2574 2574 def debugrevlog(ui, repo, file_=None, **opts):
2575 2575 """show data and statistics about a revlog"""
2576 2576 opts = pycompat.byteskwargs(opts)
2577 2577 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2578 2578
2579 2579 if opts.get(b"dump"):
2580 2580 numrevs = len(r)
2581 2581 ui.write(
2582 2582 (
2583 2583 b"# rev p1rev p2rev start end deltastart base p1 p2"
2584 2584 b" rawsize totalsize compression heads chainlen\n"
2585 2585 )
2586 2586 )
2587 2587 ts = 0
2588 2588 heads = set()
2589 2589
2590 2590 for rev in pycompat.xrange(numrevs):
2591 2591 dbase = r.deltaparent(rev)
2592 2592 if dbase == -1:
2593 2593 dbase = rev
2594 2594 cbase = r.chainbase(rev)
2595 2595 clen = r.chainlen(rev)
2596 2596 p1, p2 = r.parentrevs(rev)
2597 2597 rs = r.rawsize(rev)
2598 2598 ts = ts + rs
2599 2599 heads -= set(r.parentrevs(rev))
2600 2600 heads.add(rev)
2601 2601 try:
2602 2602 compression = ts / r.end(rev)
2603 2603 except ZeroDivisionError:
2604 2604 compression = 0
2605 2605 ui.write(
2606 2606 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2607 2607 b"%11d %5d %8d\n"
2608 2608 % (
2609 2609 rev,
2610 2610 p1,
2611 2611 p2,
2612 2612 r.start(rev),
2613 2613 r.end(rev),
2614 2614 r.start(dbase),
2615 2615 r.start(cbase),
2616 2616 r.start(p1),
2617 2617 r.start(p2),
2618 2618 rs,
2619 2619 ts,
2620 2620 compression,
2621 2621 len(heads),
2622 2622 clen,
2623 2623 )
2624 2624 )
2625 2625 return 0
2626 2626
2627 2627 v = r.version
2628 2628 format = v & 0xFFFF
2629 2629 flags = []
2630 2630 gdelta = False
2631 2631 if v & revlog.FLAG_INLINE_DATA:
2632 2632 flags.append(b'inline')
2633 2633 if v & revlog.FLAG_GENERALDELTA:
2634 2634 gdelta = True
2635 2635 flags.append(b'generaldelta')
2636 2636 if not flags:
2637 2637 flags = [b'(none)']
2638 2638
2639 2639 ### tracks merge vs single parent
2640 2640 nummerges = 0
2641 2641
2642 2642 ### tracks how the "delta" is built
2643 2643 # nodelta
2644 2644 numempty = 0
2645 2645 numemptytext = 0
2646 2646 numemptydelta = 0
2647 2647 # full file content
2648 2648 numfull = 0
2649 2649 # intermediate snapshot against a prior snapshot
2650 2650 numsemi = 0
2651 2651 # snapshot count per depth
2652 2652 numsnapdepth = collections.defaultdict(lambda: 0)
2653 2653 # delta against previous revision
2654 2654 numprev = 0
2655 2655 # delta against first or second parent (not prev)
2656 2656 nump1 = 0
2657 2657 nump2 = 0
2658 2658 # delta against neither prev nor parents
2659 2659 numother = 0
2660 2660 # delta against prev that are also first or second parent
2661 2661 # (details of `numprev`)
2662 2662 nump1prev = 0
2663 2663 nump2prev = 0
2664 2664
2665 2665 # data about delta chain of each revs
2666 2666 chainlengths = []
2667 2667 chainbases = []
2668 2668 chainspans = []
2669 2669
2670 2670 # data about each revision
2671 2671 datasize = [None, 0, 0]
2672 2672 fullsize = [None, 0, 0]
2673 2673 semisize = [None, 0, 0]
2674 2674 # snapshot count per depth
2675 2675 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2676 2676 deltasize = [None, 0, 0]
2677 2677 chunktypecounts = {}
2678 2678 chunktypesizes = {}
2679 2679
2680 2680 def addsize(size, l):
2681 2681 if l[0] is None or size < l[0]:
2682 2682 l[0] = size
2683 2683 if size > l[1]:
2684 2684 l[1] = size
2685 2685 l[2] += size
2686 2686
2687 2687 numrevs = len(r)
2688 2688 for rev in pycompat.xrange(numrevs):
2689 2689 p1, p2 = r.parentrevs(rev)
2690 2690 delta = r.deltaparent(rev)
2691 2691 if format > 0:
2692 2692 addsize(r.rawsize(rev), datasize)
2693 2693 if p2 != nullrev:
2694 2694 nummerges += 1
2695 2695 size = r.length(rev)
2696 2696 if delta == nullrev:
2697 2697 chainlengths.append(0)
2698 2698 chainbases.append(r.start(rev))
2699 2699 chainspans.append(size)
2700 2700 if size == 0:
2701 2701 numempty += 1
2702 2702 numemptytext += 1
2703 2703 else:
2704 2704 numfull += 1
2705 2705 numsnapdepth[0] += 1
2706 2706 addsize(size, fullsize)
2707 2707 addsize(size, snapsizedepth[0])
2708 2708 else:
2709 2709 chainlengths.append(chainlengths[delta] + 1)
2710 2710 baseaddr = chainbases[delta]
2711 2711 revaddr = r.start(rev)
2712 2712 chainbases.append(baseaddr)
2713 2713 chainspans.append((revaddr - baseaddr) + size)
2714 2714 if size == 0:
2715 2715 numempty += 1
2716 2716 numemptydelta += 1
2717 2717 elif r.issnapshot(rev):
2718 2718 addsize(size, semisize)
2719 2719 numsemi += 1
2720 2720 depth = r.snapshotdepth(rev)
2721 2721 numsnapdepth[depth] += 1
2722 2722 addsize(size, snapsizedepth[depth])
2723 2723 else:
2724 2724 addsize(size, deltasize)
2725 2725 if delta == rev - 1:
2726 2726 numprev += 1
2727 2727 if delta == p1:
2728 2728 nump1prev += 1
2729 2729 elif delta == p2:
2730 2730 nump2prev += 1
2731 2731 elif delta == p1:
2732 2732 nump1 += 1
2733 2733 elif delta == p2:
2734 2734 nump2 += 1
2735 2735 elif delta != nullrev:
2736 2736 numother += 1
2737 2737
2738 2738 # Obtain data on the raw chunks in the revlog.
2739 2739 if util.safehasattr(r, b'_getsegmentforrevs'):
2740 2740 segment = r._getsegmentforrevs(rev, rev)[1]
2741 2741 else:
2742 2742 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2743 2743 if segment:
2744 2744 chunktype = bytes(segment[0:1])
2745 2745 else:
2746 2746 chunktype = b'empty'
2747 2747
2748 2748 if chunktype not in chunktypecounts:
2749 2749 chunktypecounts[chunktype] = 0
2750 2750 chunktypesizes[chunktype] = 0
2751 2751
2752 2752 chunktypecounts[chunktype] += 1
2753 2753 chunktypesizes[chunktype] += size
2754 2754
2755 2755 # Adjust size min value for empty cases
2756 2756 for size in (datasize, fullsize, semisize, deltasize):
2757 2757 if size[0] is None:
2758 2758 size[0] = 0
2759 2759
2760 2760 numdeltas = numrevs - numfull - numempty - numsemi
2761 2761 numoprev = numprev - nump1prev - nump2prev
2762 2762 totalrawsize = datasize[2]
2763 2763 datasize[2] /= numrevs
2764 2764 fulltotal = fullsize[2]
2765 2765 if numfull == 0:
2766 2766 fullsize[2] = 0
2767 2767 else:
2768 2768 fullsize[2] /= numfull
2769 2769 semitotal = semisize[2]
2770 2770 snaptotal = {}
2771 2771 if numsemi > 0:
2772 2772 semisize[2] /= numsemi
2773 2773 for depth in snapsizedepth:
2774 2774 snaptotal[depth] = snapsizedepth[depth][2]
2775 2775 snapsizedepth[depth][2] /= numsnapdepth[depth]
2776 2776
2777 2777 deltatotal = deltasize[2]
2778 2778 if numdeltas > 0:
2779 2779 deltasize[2] /= numdeltas
2780 2780 totalsize = fulltotal + semitotal + deltatotal
2781 2781 avgchainlen = sum(chainlengths) / numrevs
2782 2782 maxchainlen = max(chainlengths)
2783 2783 maxchainspan = max(chainspans)
2784 2784 compratio = 1
2785 2785 if totalsize:
2786 2786 compratio = totalrawsize / totalsize
2787 2787
2788 2788 basedfmtstr = b'%%%dd\n'
2789 2789 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2790 2790
2791 2791 def dfmtstr(max):
2792 2792 return basedfmtstr % len(str(max))
2793 2793
2794 2794 def pcfmtstr(max, padding=0):
2795 2795 return basepcfmtstr % (len(str(max)), b' ' * padding)
2796 2796
2797 2797 def pcfmt(value, total):
2798 2798 if total:
2799 2799 return (value, 100 * float(value) / total)
2800 2800 else:
2801 2801 return value, 100.0
2802 2802
2803 2803 ui.writenoi18n(b'format : %d\n' % format)
2804 2804 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2805 2805
2806 2806 ui.write(b'\n')
2807 2807 fmt = pcfmtstr(totalsize)
2808 2808 fmt2 = dfmtstr(totalsize)
2809 2809 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2810 2810 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2811 2811 ui.writenoi18n(
2812 2812 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2813 2813 )
2814 2814 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2815 2815 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2816 2816 ui.writenoi18n(
2817 2817 b' text : '
2818 2818 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2819 2819 )
2820 2820 ui.writenoi18n(
2821 2821 b' delta : '
2822 2822 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2823 2823 )
2824 2824 ui.writenoi18n(
2825 2825 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2826 2826 )
2827 2827 for depth in sorted(numsnapdepth):
2828 2828 ui.write(
2829 2829 (b' lvl-%-3d : ' % depth)
2830 2830 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2831 2831 )
2832 2832 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2833 2833 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2834 2834 ui.writenoi18n(
2835 2835 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2836 2836 )
2837 2837 for depth in sorted(numsnapdepth):
2838 2838 ui.write(
2839 2839 (b' lvl-%-3d : ' % depth)
2840 2840 + fmt % pcfmt(snaptotal[depth], totalsize)
2841 2841 )
2842 2842 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2843 2843
2844 2844 def fmtchunktype(chunktype):
2845 2845 if chunktype == b'empty':
2846 2846 return b' %s : ' % chunktype
2847 2847 elif chunktype in pycompat.bytestr(string.ascii_letters):
2848 2848 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2849 2849 else:
2850 2850 return b' 0x%s : ' % hex(chunktype)
2851 2851
2852 2852 ui.write(b'\n')
2853 2853 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2854 2854 for chunktype in sorted(chunktypecounts):
2855 2855 ui.write(fmtchunktype(chunktype))
2856 2856 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2857 2857 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2858 2858 for chunktype in sorted(chunktypecounts):
2859 2859 ui.write(fmtchunktype(chunktype))
2860 2860 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2861 2861
2862 2862 ui.write(b'\n')
2863 2863 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2864 2864 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2865 2865 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2866 2866 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2867 2867 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2868 2868
2869 2869 if format > 0:
2870 2870 ui.write(b'\n')
2871 2871 ui.writenoi18n(
2872 2872 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2873 2873 % tuple(datasize)
2874 2874 )
2875 2875 ui.writenoi18n(
2876 2876 b'full revision size (min/max/avg) : %d / %d / %d\n'
2877 2877 % tuple(fullsize)
2878 2878 )
2879 2879 ui.writenoi18n(
2880 2880 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2881 2881 % tuple(semisize)
2882 2882 )
2883 2883 for depth in sorted(snapsizedepth):
2884 2884 if depth == 0:
2885 2885 continue
2886 2886 ui.writenoi18n(
2887 2887 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2888 2888 % ((depth,) + tuple(snapsizedepth[depth]))
2889 2889 )
2890 2890 ui.writenoi18n(
2891 2891 b'delta size (min/max/avg) : %d / %d / %d\n'
2892 2892 % tuple(deltasize)
2893 2893 )
2894 2894
2895 2895 if numdeltas > 0:
2896 2896 ui.write(b'\n')
2897 2897 fmt = pcfmtstr(numdeltas)
2898 2898 fmt2 = pcfmtstr(numdeltas, 4)
2899 2899 ui.writenoi18n(
2900 2900 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2901 2901 )
2902 2902 if numprev > 0:
2903 2903 ui.writenoi18n(
2904 2904 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2905 2905 )
2906 2906 ui.writenoi18n(
2907 2907 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2908 2908 )
2909 2909 ui.writenoi18n(
2910 2910 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2911 2911 )
2912 2912 if gdelta:
2913 2913 ui.writenoi18n(
2914 2914 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2915 2915 )
2916 2916 ui.writenoi18n(
2917 2917 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2918 2918 )
2919 2919 ui.writenoi18n(
2920 2920 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2921 2921 )
2922 2922
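# --- editor's illustrative sketch (not part of Mercurial) -----------------
# The percentage columns above come from a small helper that pairs a raw
# value with its share of a total, treating an empty total as 100%.
# Stand-alone version (made-up _demo_* name):
def _demo_pcfmt(value=25, total=200):
    if total:
        return (value, 100 * float(value) / total)
    return (value, 100.0)
# _demo_pcfmt() == (25, 12.5); _demo_pcfmt(3, 0) == (3, 100.0)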
2923 2923
2924 2924 @command(
2925 2925 b'debugrevlogindex',
2926 2926 cmdutil.debugrevlogopts
2927 2927 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2928 2928 _(b'[-f FORMAT] -c|-m|FILE'),
2929 2929 optionalrepo=True,
2930 2930 )
2931 2931 def debugrevlogindex(ui, repo, file_=None, **opts):
2932 2932 """dump the contents of a revlog index"""
2933 2933 opts = pycompat.byteskwargs(opts)
2934 2934 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2935 2935 format = opts.get(b'format', 0)
2936 2936 if format not in (0, 1):
2937 2937 raise error.Abort(_(b"unknown format %d") % format)
2938 2938
2939 2939 if ui.debugflag:
2940 2940 shortfn = hex
2941 2941 else:
2942 2942 shortfn = short
2943 2943
2944 2944 # There might not be anything in r, so have a sane default
2945 2945 idlen = 12
2946 2946 for i in r:
2947 2947 idlen = len(shortfn(r.node(i)))
2948 2948 break
2949 2949
2950 2950 if format == 0:
2951 2951 if ui.verbose:
2952 2952 ui.writenoi18n(
2953 2953 b" rev offset length linkrev %s %s p2\n"
2954 2954 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2955 2955 )
2956 2956 else:
2957 2957 ui.writenoi18n(
2958 2958 b" rev linkrev %s %s p2\n"
2959 2959 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2960 2960 )
2961 2961 elif format == 1:
2962 2962 if ui.verbose:
2963 2963 ui.writenoi18n(
2964 2964 (
2965 2965 b" rev flag offset length size link p1"
2966 2966 b" p2 %s\n"
2967 2967 )
2968 2968 % b"nodeid".rjust(idlen)
2969 2969 )
2970 2970 else:
2971 2971 ui.writenoi18n(
2972 2972 b" rev flag size link p1 p2 %s\n"
2973 2973 % b"nodeid".rjust(idlen)
2974 2974 )
2975 2975
2976 2976 for i in r:
2977 2977 node = r.node(i)
2978 2978 if format == 0:
2979 2979 try:
2980 2980 pp = r.parents(node)
2981 2981 except Exception:
2982 2982 pp = [nullid, nullid]
2983 2983 if ui.verbose:
2984 2984 ui.write(
2985 2985 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2986 2986 % (
2987 2987 i,
2988 2988 r.start(i),
2989 2989 r.length(i),
2990 2990 r.linkrev(i),
2991 2991 shortfn(node),
2992 2992 shortfn(pp[0]),
2993 2993 shortfn(pp[1]),
2994 2994 )
2995 2995 )
2996 2996 else:
2997 2997 ui.write(
2998 2998 b"% 6d % 7d %s %s %s\n"
2999 2999 % (
3000 3000 i,
3001 3001 r.linkrev(i),
3002 3002 shortfn(node),
3003 3003 shortfn(pp[0]),
3004 3004 shortfn(pp[1]),
3005 3005 )
3006 3006 )
3007 3007 elif format == 1:
3008 3008 pr = r.parentrevs(i)
3009 3009 if ui.verbose:
3010 3010 ui.write(
3011 3011 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3012 3012 % (
3013 3013 i,
3014 3014 r.flags(i),
3015 3015 r.start(i),
3016 3016 r.length(i),
3017 3017 r.rawsize(i),
3018 3018 r.linkrev(i),
3019 3019 pr[0],
3020 3020 pr[1],
3021 3021 shortfn(node),
3022 3022 )
3023 3023 )
3024 3024 else:
3025 3025 ui.write(
3026 3026 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3027 3027 % (
3028 3028 i,
3029 3029 r.flags(i),
3030 3030 r.rawsize(i),
3031 3031 r.linkrev(i),
3032 3032 pr[0],
3033 3033 pr[1],
3034 3034 shortfn(node),
3035 3035 )
3036 3036 )
3037 3037
3038 3038
3039 3039 @command(
3040 3040 b'debugrevspec',
3041 3041 [
3042 3042 (
3043 3043 b'',
3044 3044 b'optimize',
3045 3045 None,
3046 3046 _(b'print parsed tree after optimizing (DEPRECATED)'),
3047 3047 ),
3048 3048 (
3049 3049 b'',
3050 3050 b'show-revs',
3051 3051 True,
3052 3052 _(b'print list of result revisions (default)'),
3053 3053 ),
3054 3054 (
3055 3055 b's',
3056 3056 b'show-set',
3057 3057 None,
3058 3058 _(b'print internal representation of result set'),
3059 3059 ),
3060 3060 (
3061 3061 b'p',
3062 3062 b'show-stage',
3063 3063 [],
3064 3064 _(b'print parsed tree at the given stage'),
3065 3065 _(b'NAME'),
3066 3066 ),
3067 3067 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3068 3068 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3069 3069 ],
3070 3070 b'REVSPEC',
3071 3071 )
3072 3072 def debugrevspec(ui, repo, expr, **opts):
3073 3073 """parse and apply a revision specification
3074 3074
3075 3075 Use the -p/--show-stage option to print the parsed tree at the given stages.
3076 3076 Use -p all to print the tree at every stage.
3077 3077
3078 3078 Use --no-show-revs option with -s or -p to print only the set
3079 3079 representation or the parsed tree respectively.
3080 3080
3081 3081 Use --verify-optimized to compare the optimized result with the unoptimized
3082 3082 one. Returns 1 if the optimized result differs.
3083 3083 """
3084 3084 opts = pycompat.byteskwargs(opts)
3085 3085 aliases = ui.configitems(b'revsetalias')
3086 3086 stages = [
3087 3087 (b'parsed', lambda tree: tree),
3088 3088 (
3089 3089 b'expanded',
3090 3090 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3091 3091 ),
3092 3092 (b'concatenated', revsetlang.foldconcat),
3093 3093 (b'analyzed', revsetlang.analyze),
3094 3094 (b'optimized', revsetlang.optimize),
3095 3095 ]
3096 3096 if opts[b'no_optimized']:
3097 3097 stages = stages[:-1]
3098 3098 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3099 3099 raise error.Abort(
3100 3100 _(b'cannot use --verify-optimized with --no-optimized')
3101 3101 )
3102 3102 stagenames = set(n for n, f in stages)
3103 3103
3104 3104 showalways = set()
3105 3105 showchanged = set()
3106 3106 if ui.verbose and not opts[b'show_stage']:
3107 3107 # show parsed tree by --verbose (deprecated)
3108 3108 showalways.add(b'parsed')
3109 3109 showchanged.update([b'expanded', b'concatenated'])
3110 3110 if opts[b'optimize']:
3111 3111 showalways.add(b'optimized')
3112 3112 if opts[b'show_stage'] and opts[b'optimize']:
3113 3113 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3114 3114 if opts[b'show_stage'] == [b'all']:
3115 3115 showalways.update(stagenames)
3116 3116 else:
3117 3117 for n in opts[b'show_stage']:
3118 3118 if n not in stagenames:
3119 3119 raise error.Abort(_(b'invalid stage name: %s') % n)
3120 3120 showalways.update(opts[b'show_stage'])
3121 3121
3122 3122 treebystage = {}
3123 3123 printedtree = None
3124 3124 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3125 3125 for n, f in stages:
3126 3126 treebystage[n] = tree = f(tree)
3127 3127 if n in showalways or (n in showchanged and tree != printedtree):
3128 3128 if opts[b'show_stage'] or n != b'parsed':
3129 3129 ui.write(b"* %s:\n" % n)
3130 3130 ui.write(revsetlang.prettyformat(tree), b"\n")
3131 3131 printedtree = tree
3132 3132
3133 3133 if opts[b'verify_optimized']:
3134 3134 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3135 3135 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3136 3136 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3137 3137 ui.writenoi18n(
3138 3138 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3139 3139 )
3140 3140 ui.writenoi18n(
3141 3141 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3142 3142 )
3143 3143 arevs = list(arevs)
3144 3144 brevs = list(brevs)
3145 3145 if arevs == brevs:
3146 3146 return 0
3147 3147 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3148 3148 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3149 3149 sm = difflib.SequenceMatcher(None, arevs, brevs)
3150 3150 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3151 3151 if tag in ('delete', 'replace'):
3152 3152 for c in arevs[alo:ahi]:
3153 3153 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3154 3154 if tag in ('insert', 'replace'):
3155 3155 for c in brevs[blo:bhi]:
3156 3156 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3157 3157 if tag == 'equal':
3158 3158 for c in arevs[alo:ahi]:
3159 3159 ui.write(b' %d\n' % c)
3160 3160 return 1
3161 3161
3162 3162 func = revset.makematcher(tree)
3163 3163 revs = func(repo)
3164 3164 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3165 3165 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3166 3166 if not opts[b'show_revs']:
3167 3167 return
3168 3168 for c in revs:
3169 3169 ui.write(b"%d\n" % c)
3170 3170
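# --- editor's illustrative sketch (not part of Mercurial) -----------------
# The expression is pushed through a list of named stages, each a function
# from tree to tree, and a stage is printed when requested or when it
# changed the result.  The same pipeline shape on plain bytestrings
# (made-up _demo_* name and stages):
def _demo_stages(expr=b' heads( default ) '):
    stages = [
        (b'parsed', lambda t: t),
        (b'stripped', lambda t: t.strip()),
        (b'collapsed', lambda t: b' '.join(t.split())),
    ]
    seen = []
    tree = expr
    for name, f in stages:
        tree = f(tree)
        seen.append((name, tree))
    return seen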
3171 3171
3172 3172 @command(
3173 3173 b'debugserve',
3174 3174 [
3175 3175 (
3176 3176 b'',
3177 3177 b'sshstdio',
3178 3178 False,
3179 3179 _(b'run an SSH server bound to process handles'),
3180 3180 ),
3181 3181 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3182 3182 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3183 3183 ],
3184 3184 b'',
3185 3185 )
3186 3186 def debugserve(ui, repo, **opts):
3187 3187 """run a server with advanced settings
3188 3188
3189 3189 This command is similar to :hg:`serve`. It exists partially as a
3190 3190 workaround for the fact that ``hg serve --stdio`` must have specific
3191 3191 arguments for security reasons.
3192 3192 """
3193 3193 opts = pycompat.byteskwargs(opts)
3194 3194
3195 3195 if not opts[b'sshstdio']:
3196 3196 raise error.Abort(_(b'only --sshstdio is currently supported'))
3197 3197
3198 3198 logfh = None
3199 3199
3200 3200 if opts[b'logiofd'] and opts[b'logiofile']:
3201 3201 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3202 3202
3203 3203 if opts[b'logiofd']:
3204 3204 # Line buffered because output is line based.
3205 3205 try:
3206 3206 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
3207 3207 except OSError as e:
3208 3208 if e.errno != errno.ESPIPE:
3209 3209 raise
3210 3210 # can't seek a pipe, so `ab` mode fails on py3
3211 3211 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
3212 3212 elif opts[b'logiofile']:
3213 3213 logfh = open(opts[b'logiofile'], b'ab', 1)
3214 3214
3215 3215 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3216 3216 s.serve_forever()
3217 3217
3218 3218
3219 3219 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3220 3220 def debugsetparents(ui, repo, rev1, rev2=None):
3221 3221 """manually set the parents of the current working directory
3222 3222
3223 3223 This is useful for writing repository conversion tools, but should
3224 3224 be used with care. For example, neither the working directory nor the
3225 3225 dirstate is updated, so file status may be incorrect after running this
3226 3226 command.
3227 3227
3228 3228 Returns 0 on success.
3229 3229 """
3230 3230
3231 3231 node1 = scmutil.revsingle(repo, rev1).node()
3232 3232 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3233 3233
3234 3234 with repo.wlock():
3235 3235 repo.setparents(node1, node2)
3236 3236
3237 3237
3238 3238 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3239 3239 def debugsidedata(ui, repo, file_, rev=None, **opts):
3240 3240 """dump the side data for a cl/manifest/file revision
3241 3241
3242 3242 Use --verbose to dump the sidedata content."""
3243 3243 opts = pycompat.byteskwargs(opts)
3244 3244 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3245 3245 if rev is not None:
3246 3246 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3247 3247 file_, rev = None, file_
3248 3248 elif rev is None:
3249 3249 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3250 3250 r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
3251 3251 r = getattr(r, '_revlog', r)
3252 3252 try:
3253 3253 sidedata = r.sidedata(r.lookup(rev))
3254 3254 except KeyError:
3255 3255 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3256 3256 if sidedata:
3257 3257 sidedata = list(sidedata.items())
3258 3258 sidedata.sort()
3259 3259 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3260 3260 for key, value in sidedata:
3261 3261 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3262 3262 if ui.verbose:
3263 3263 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3264 3264
3265 3265
3266 3266 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3267 3267 def debugssl(ui, repo, source=None, **opts):
3268 3268 '''test a secure connection to a server
3269 3269
3270 3270 This builds the certificate chain for the server on Windows, installing the
3271 3271 missing intermediates and trusted root via Windows Update if necessary. It
3272 3272 does nothing on other platforms.
3273 3273
3274 3274 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3275 3275 that server is used. See :hg:`help urls` for more information.
3276 3276
3277 3277 If the update succeeds, retry the original operation. Otherwise, the cause
3278 3278 of the SSL error is likely another issue.
3279 3279 '''
3280 3280 if not pycompat.iswindows:
3281 3281 raise error.Abort(
3282 3282 _(b'certificate chain building is only possible on Windows')
3283 3283 )
3284 3284
3285 3285 if not source:
3286 3286 if not repo:
3287 3287 raise error.Abort(
3288 3288 _(
3289 3289 b"there is no Mercurial repository here, and no "
3290 3290 b"server specified"
3291 3291 )
3292 3292 )
3293 3293 source = b"default"
3294 3294
3295 3295 source, branches = hg.parseurl(ui.expandpath(source))
3296 3296 url = util.url(source)
3297 3297
3298 3298 defaultport = {b'https': 443, b'ssh': 22}
3299 3299 if url.scheme in defaultport:
3300 3300 try:
3301 3301 addr = (url.host, int(url.port or defaultport[url.scheme]))
3302 3302 except ValueError:
3303 3303 raise error.Abort(_(b"malformed port number in URL"))
3304 3304 else:
3305 3305 raise error.Abort(_(b"only https and ssh connections are supported"))
3306 3306
3307 3307 from . import win32
3308 3308
3309 3309 s = ssl.wrap_socket(
3310 3310 socket.socket(),
3311 3311 ssl_version=ssl.PROTOCOL_TLS,
3312 3312 cert_reqs=ssl.CERT_NONE,
3313 3313 ca_certs=None,
3314 3314 )
3315 3315
3316 3316 try:
3317 3317 s.connect(addr)
3318 3318 cert = s.getpeercert(True)
3319 3319
3320 3320 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3321 3321
3322 3322 complete = win32.checkcertificatechain(cert, build=False)
3323 3323
3324 3324 if not complete:
3325 3325 ui.status(_(b'certificate chain is incomplete, updating... '))
3326 3326
3327 3327 if not win32.checkcertificatechain(cert):
3328 3328 ui.status(_(b'failed.\n'))
3329 3329 else:
3330 3330 ui.status(_(b'done.\n'))
3331 3331 else:
3332 3332 ui.status(_(b'full certificate chain is available\n'))
3333 3333 finally:
3334 3334 s.close()
3335 3335
3336 3336
3337 3337 @command(
3338 3338 b'debugsub',
3339 3339 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3340 3340 _(b'[-r REV] [REV]'),
3341 3341 )
3342 3342 def debugsub(ui, repo, rev=None):
3343 3343 ctx = scmutil.revsingle(repo, rev, None)
3344 3344 for k, v in sorted(ctx.substate.items()):
3345 3345 ui.writenoi18n(b'path %s\n' % k)
3346 3346 ui.writenoi18n(b' source %s\n' % v[0])
3347 3347 ui.writenoi18n(b' revision %s\n' % v[1])
3348 3348
3349 3349
3350 3350 @command(
3351 3351 b'debugsuccessorssets',
3352 3352 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3353 3353 _(b'[REV]'),
3354 3354 )
3355 3355 def debugsuccessorssets(ui, repo, *revs, **opts):
3356 3356 """show set of successors for revision
3357 3357
3358 3358 A successors set of changeset A is a consistent group of revisions that
3359 3359 succeed A. It contains non-obsolete changesets only unless closests
3360 3360 successors set is set.
3361 3361
3362 3362 In most cases a changeset A has a single successors set containing a single
3363 3363 successor (changeset A replaced by A').
3364 3364
3365 3365 A changeset that is made obsolete with no successors is called "pruned".
3366 3366 Such changesets have no successors sets at all.
3367 3367
3368 3368 A changeset that has been "split" will have a successors set containing
3369 3369 more than one successor.
3370 3370
3371 3371 A changeset that has been rewritten in multiple different ways is called
3372 3372 "divergent". Such changesets have multiple successor sets (each of which
3373 3373 may also be split, i.e. have multiple successors).
3374 3374
3375 3375 Results are displayed as follows::
3376 3376
3377 3377 <rev1>
3378 3378 <successors-1A>
3379 3379 <rev2>
3380 3380 <successors-2A>
3381 3381 <successors-2B1> <successors-2B2> <successors-2B3>
3382 3382
3383 3383 Here rev2 has two possible (i.e. divergent) successors sets. The first
3384 3384 holds one element, whereas the second holds three (i.e. the changeset has
3385 3385 been split).
3386 3386 """
3387 3387 # passed to successorssets caching computation from one call to another
3388 3388 cache = {}
3389 3389 ctx2str = bytes
3390 3390 node2str = short
3391 3391 for rev in scmutil.revrange(repo, revs):
3392 3392 ctx = repo[rev]
3393 3393 ui.write(b'%s\n' % ctx2str(ctx))
3394 3394 for succsset in obsutil.successorssets(
3395 3395 repo, ctx.node(), closest=opts['closest'], cache=cache
3396 3396 ):
3397 3397 if succsset:
3398 3398 ui.write(b' ')
3399 3399 ui.write(node2str(succsset[0]))
3400 3400 for node in succsset[1:]:
3401 3401 ui.write(b' ')
3402 3402 ui.write(node2str(node))
3403 3403 ui.write(b'\n')
3404 3404
3405 3405
3406 3406 @command(
3407 3407 b'debugtemplate',
3408 3408 [
3409 3409 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3410 3410 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3411 3411 ],
3412 3412 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3413 3413 optionalrepo=True,
3414 3414 )
3415 3415 def debugtemplate(ui, repo, tmpl, **opts):
3416 3416 """parse and apply a template
3417 3417
3418 3418 If -r/--rev is given, the template is processed as a log template and
3419 3419 applied to the given changesets. Otherwise, it is processed as a generic
3420 3420 template.
3421 3421
3422 3422 Use --verbose to print the parsed tree.
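
For example, a log-style template applied to the working directory
parent might be rendered with::

hg debugtemplate -r . '{node|short} {desc|firstline}\n'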
3423 3423 """
3424 3424 revs = None
3425 3425 if opts['rev']:
3426 3426 if repo is None:
3427 3427 raise error.RepoError(
3428 3428 _(b'there is no Mercurial repository here (.hg not found)')
3429 3429 )
3430 3430 revs = scmutil.revrange(repo, opts['rev'])
3431 3431
3432 3432 props = {}
3433 3433 for d in opts['define']:
3434 3434 try:
3435 3435 k, v = (e.strip() for e in d.split(b'=', 1))
3436 3436 if not k or k == b'ui':
3437 3437 raise ValueError
3438 3438 props[k] = v
3439 3439 except ValueError:
3440 3440 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3441 3441
3442 3442 if ui.verbose:
3443 3443 aliases = ui.configitems(b'templatealias')
3444 3444 tree = templater.parse(tmpl)
3445 3445 ui.note(templater.prettyformat(tree), b'\n')
3446 3446 newtree = templater.expandaliases(tree, aliases)
3447 3447 if newtree != tree:
3448 3448 ui.notenoi18n(
3449 3449 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3450 3450 )
3451 3451
3452 3452 if revs is None:
3453 3453 tres = formatter.templateresources(ui, repo)
3454 3454 t = formatter.maketemplater(ui, tmpl, resources=tres)
3455 3455 if ui.verbose:
3456 3456 kwds, funcs = t.symbolsuseddefault()
3457 3457 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3458 3458 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3459 3459 ui.write(t.renderdefault(props))
3460 3460 else:
3461 3461 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3462 3462 if ui.verbose:
3463 3463 kwds, funcs = displayer.t.symbolsuseddefault()
3464 3464 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3465 3465 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3466 3466 for r in revs:
3467 3467 displayer.show(repo[r], **pycompat.strkwargs(props))
3468 3468 displayer.close()
3469 3469
3470 3470
3471 3471 @command(
3472 3472 b'debuguigetpass',
3473 3473 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3474 3474 _(b'[-p TEXT]'),
3475 3475 norepo=True,
3476 3476 )
3477 3477 def debuguigetpass(ui, prompt=b''):
3478 3478 """show prompt to type password"""
3479 3479 r = ui.getpass(prompt)
3480 3480 ui.writenoi18n(b'response: %s\n' % r)
3481 3481
3482 3482
3483 3483 @command(
3484 3484 b'debuguiprompt',
3485 3485 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3486 3486 _(b'[-p TEXT]'),
3487 3487 norepo=True,
3488 3488 )
3489 3489 def debuguiprompt(ui, prompt=b''):
3490 3490 """show plain prompt"""
3491 3491 r = ui.prompt(prompt)
3492 3492 ui.writenoi18n(b'response: %s\n' % r)
3493 3493
3494 3494
3495 3495 @command(b'debugupdatecaches', [])
3496 3496 def debugupdatecaches(ui, repo, *pats, **opts):
3497 3497 """warm all known caches in the repository"""
3498 3498 with repo.wlock(), repo.lock():
3499 3499 repo.updatecaches(full=True)
3500 3500
3501 3501
3502 3502 @command(
3503 3503 b'debugupgraderepo',
3504 3504 [
3505 3505 (
3506 3506 b'o',
3507 3507 b'optimize',
3508 3508 [],
3509 3509 _(b'extra optimization to perform'),
3510 3510 _(b'NAME'),
3511 3511 ),
3512 3512 (b'', b'run', False, _(b'performs an upgrade')),
3513 3513 (b'', b'backup', True, _(b'keep the old repository content around')),
3514 3514 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3515 3515 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3516 3516 ],
3517 3517 )
3518 3518 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3519 3519 """upgrade a repository to use different features
3520 3520
3521 3521 If no arguments are specified, the repository is evaluated for upgrade
3522 3522 and a list of problems and potential optimizations is printed.
3523 3523
3524 3524 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3525 3525 can be influenced via additional arguments. More details will be provided
3526 3526 by the command output when run without ``--run``.
3527 3527
3528 3528 During the upgrade, the repository will be locked and no writes will be
3529 3529 allowed.
3530 3530
3531 3531 At the end of the upgrade, the repository may not be readable while new
3532 3532 repository data is swapped in. This window will be as long as it takes to
3533 3533 rename some directories inside the ``.hg`` directory. On most machines, this
3534 3534 should complete almost instantaneously and the chances of a consumer being
3535 3535 unable to access the repository should be low.
3536 3536
3537 3537 By default, all revlogs will be upgraded. You can restrict this using flags
3538 3538 such as `--manifest` (an example invocation follows the list):
3539 3539
3540 3540 * `--manifest`: only optimize the manifest
3541 3541 * `--no-manifest`: optimize all revlogs but the manifest
3542 3542 * `--changelog`: optimize the changelog only
3543 3543 * `--no-changelog --no-manifest`: optimize filelogs only
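
For example, after reviewing the dry-run output, the manifest alone might
be upgraded with::

hg debugupgraderepo --run --manifest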
3544 3544 """
3545 3545 return upgrade.upgraderepo(
3546 3546 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3547 3547 )
3548 3548
3549 3549
3550 3550 @command(
3551 3551 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3552 3552 )
3553 3553 def debugwalk(ui, repo, *pats, **opts):
3554 3554 """show how files match on given patterns"""
3555 3555 opts = pycompat.byteskwargs(opts)
3556 3556 m = scmutil.match(repo[None], pats, opts)
3557 3557 if ui.verbose:
3558 3558 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3559 3559 items = list(repo[None].walk(m))
3560 3560 if not items:
3561 3561 return
3562 3562 f = lambda fn: fn
3563 3563 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3564 3564 f = lambda fn: util.normpath(fn)
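# Column widths are derived from the longest repo-relative and
# cwd-relative paths so that the output columns line up.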
3565 3565 fmt = b'f %%-%ds %%-%ds %%s' % (
3566 3566 max([len(abs) for abs in items]),
3567 3567 max([len(repo.pathto(abs)) for abs in items]),
3568 3568 )
3569 3569 for abs in items:
3570 3570 line = fmt % (
3571 3571 abs,
3572 3572 f(repo.pathto(abs)),
3573 3573 m.exact(abs) and b'exact' or b'',
3574 3574 )
3575 3575 ui.write(b"%s\n" % line.rstrip())
3576 3576
3577 3577
3578 3578 @command(b'debugwhyunstable', [], _(b'REV'))
3579 3579 def debugwhyunstable(ui, repo, rev):
3580 3580 """explain instabilities of a changeset"""
3581 3581 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3582 3582 dnodes = b''
3583 3583 if entry.get(b'divergentnodes'):
3584 3584 dnodes = (
3585 3585 b' '.join(
3586 3586 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3587 3587 for ctx in entry[b'divergentnodes']
3588 3588 )
3589 3589 + b' '
3590 3590 )
3591 3591 ui.write(
3592 3592 b'%s: %s%s %s\n'
3593 3593 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3594 3594 )
3595 3595
3596 3596
3597 3597 @command(
3598 3598 b'debugwireargs',
3599 3599 [
3600 3600 (b'', b'three', b'', b'three'),
3601 3601 (b'', b'four', b'', b'four'),
3602 3602 (b'', b'five', b'', b'five'),
3603 3603 ]
3604 3604 + cmdutil.remoteopts,
3605 3605 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3606 3606 norepo=True,
3607 3607 )
3608 3608 def debugwireargs(ui, repopath, *vals, **opts):
3609 3609 opts = pycompat.byteskwargs(opts)
3610 3610 repo = hg.peer(ui, opts, repopath)
3611 3611 for opt in cmdutil.remoteopts:
3612 3612 del opts[opt[1]]
3613 3613 args = {}
3614 3614 for k, v in pycompat.iteritems(opts):
3615 3615 if v:
3616 3616 args[k] = v
3617 3617 args = pycompat.strkwargs(args)
3618 3618 # run twice to check that we don't mess up the stream for the next command
3619 3619 res1 = repo.debugwireargs(*vals, **args)
3620 3620 res2 = repo.debugwireargs(*vals, **args)
3621 3621 ui.write(b"%s\n" % res1)
3622 3622 if res1 != res2:
3623 3623 ui.warn(b"%s\n" % res2)
3624 3624
3625 3625
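# Helper for debugwireproto: split the stdin mini-language into
# (action, lines) blocks. For example, the input
#
#   command listkeys
#       namespace bookmarks
#
# yields (b'command listkeys', [b'    namespace bookmarks']); indented
# lines keep their leading whitespace and callers lstrip() them as needed.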
3626 3626 def _parsewirelangblocks(fh):
3627 3627 activeaction = None
3628 3628 blocklines = []
3629 3629 lastindent = 0
3630 3630
3631 3631 for line in fh:
3632 3632 line = line.rstrip()
3633 3633 if not line:
3634 3634 continue
3635 3635
3636 3636 if line.startswith(b'#'):
3637 3637 continue
3638 3638
3639 3639 if not line.startswith(b' '):
3640 3640 # New block. Flush previous one.
3641 3641 if activeaction:
3642 3642 yield activeaction, blocklines
3643 3643
3644 3644 activeaction = line
3645 3645 blocklines = []
3646 3646 lastindent = 0
3647 3647 continue
3648 3648
3649 3649 # Else we start with an indent.
3650 3650
3651 3651 if not activeaction:
3652 3652 raise error.Abort(_(b'indented line outside of block'))
3653 3653
3654 3654 indent = len(line) - len(line.lstrip())
3655 3655
3656 3656 # If this line is indented more than the last line, concatenate it.
3657 3657 if indent > lastindent and blocklines:
3658 3658 blocklines[-1] += line.lstrip()
3659 3659 else:
3660 3660 blocklines.append(line)
3661 3661 lastindent = indent
3662 3662
3663 3663 # Flush last block.
3664 3664 if activeaction:
3665 3665 yield activeaction, blocklines
3666 3666
3667 3667
3668 3668 @command(
3669 3669 b'debugwireproto',
3670 3670 [
3671 3671 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3672 3672 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3673 3673 (
3674 3674 b'',
3675 3675 b'noreadstderr',
3676 3676 False,
3677 3677 _(b'do not read from stderr of the remote'),
3678 3678 ),
3679 3679 (
3680 3680 b'',
3681 3681 b'nologhandshake',
3682 3682 False,
3683 3683 _(b'do not log I/O related to the peer handshake'),
3684 3684 ),
3685 3685 ]
3686 3686 + cmdutil.remoteopts,
3687 3687 _(b'[PATH]'),
3688 3688 optionalrepo=True,
3689 3689 )
3690 3690 def debugwireproto(ui, repo, path=None, **opts):
3691 3691 """send wire protocol commands to a server
3692 3692
3693 3693 This command can be used to issue wire protocol commands to remote
3694 3694 peers and to debug the raw data being exchanged.
3695 3695
3696 3696 ``--localssh`` will start an SSH server against the current repository
3697 3697 and connect to that. By default, the connection will perform a handshake
3698 3698 and establish an appropriate peer instance.
3699 3699
3700 3700 ``--peer`` can be used to bypass the handshake protocol and construct a
3701 3701 peer instance using the specified class type. Valid values are ``raw``,
3702 3702 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3703 3703 raw data payloads and don't support higher-level command actions.
3704 3704
3705 3705 ``--noreadstderr`` can be used to disable automatic reading from stderr
3706 3706 of the peer (for SSH connections only). Disabling automatic reading of
3707 3707 stderr is useful for making output more deterministic.
3708 3708
3709 3709 Commands are issued via a mini language which is specified via stdin.
3710 3710 The language consists of individual actions to perform. An action is
3711 3711 defined by a block. A block is defined as a line with no leading
3712 3712 space followed by 0 or more lines with leading space. Blocks are
3713 3713 effectively a high-level command with additional metadata.
3714 3714
3715 3715 Lines beginning with ``#`` are ignored.
3716 3716
3717 3717 The following sections denote available actions.
3718 3718
3719 3719 raw
3720 3720 ---
3721 3721
3722 3722 Send raw data to the server.
3723 3723
3724 3724 The block payload contains the raw data to send as one atomic send
3725 3725 operation. The data may not actually be delivered in a single system
3726 3726 call: it depends on the abilities of the transport being used.
3727 3727
3728 3728 Each line in the block is de-indented and concatenated. Then, that
3729 3729 value is evaluated as a Python b'' literal. This allows the use of
3730 3730 backslash escaping, etc.
3731 3731
3732 3732 raw+
3733 3733 ----
3734 3734
3735 3735 Behaves like ``raw`` except flushes output afterwards.
3736 3736
3737 3737 command <X>
3738 3738 -----------
3739 3739
3740 3740 Send a request to run a named command, whose name follows the ``command``
3741 3741 string.
3742 3742
3743 3743 Arguments to the command are defined as lines in this block. The format of
3744 3744 each line is ``<key> <value>``. e.g.::
3745 3745
3746 3746 command listkeys
3747 3747 namespace bookmarks
3748 3748
3749 3749 If the value begins with ``eval:``, it will be interpreted as a Python
3750 3750 literal expression. Otherwise values are interpreted as Python b'' literals.
3751 3751 This allows sending complex types and encoding special byte sequences via
3752 3752 backslash escaping.
3753 3753
3754 3754 The following arguments have special meaning:
3755 3755
3756 3756 ``PUSHFILE``
3757 3757 When defined, the *push* mechanism of the peer will be used instead
3758 3758 of the static request-response mechanism and the content of the
3759 3759 file specified in the value of this argument will be sent as the
3760 3760 command payload.
3761 3761
3762 3762 This can be used to submit a local bundle file to the remote.
3763 3763
3764 3764 batchbegin
3765 3765 ----------
3766 3766
3767 3767 Instruct the peer to begin a batched send.
3768 3768
3769 3769 All ``command`` blocks are queued for execution until the next
3770 3770 ``batchsubmit`` block.
3771 3771
3772 3772 batchsubmit
3773 3773 -----------
3774 3774
3775 3775 Submit previously queued ``command`` blocks as a batch request.
3776 3776
3777 3777 This action MUST be paired with a ``batchbegin`` action.
3778 3778
3779 3779 httprequest <method> <path>
3780 3780 ---------------------------
3781 3781
3782 3782 (HTTP peer only)
3783 3783
3784 3784 Send an HTTP request to the peer.
3785 3785
3786 3786 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3787 3787
3788 3788 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3789 3789 headers to add to the request. e.g. ``Accept: foo``.
3790 3790
3791 3791 The following arguments are special:
3792 3792
3793 3793 ``BODYFILE``
3794 3794 The content of the file defined as the value to this argument will be
3795 3795 transferred verbatim as the HTTP request body.
3796 3796
3797 3797 ``frame <type> <flags> <payload>``
3798 3798 Send a unified protocol frame as part of the request body.
3799 3799
3800 3800 All frames will be collected and sent as the body to the HTTP
3801 3801 request.
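
For instance, a minimal capabilities request (the header value shown here
is just illustrative) could be written as::

httprequest GET ?cmd=capabilities
user-agent: test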
3802 3802
3803 3803 close
3804 3804 -----
3805 3805
3806 3806 Close the connection to the server.
3807 3807
3808 3808 flush
3809 3809 -----
3810 3810
3811 3811 Flush data written to the server.
3812 3812
3813 3813 readavailable
3814 3814 -------------
3815 3815
3816 3816 Close the write end of the connection and read all available data from
3817 3817 the server.
3818 3818
3819 3819 If the connection to the server encompasses multiple pipes, we poll both
3820 3820 pipes and read available data.
3821 3821
3822 3822 readline
3823 3823 --------
3824 3824
3825 3825 Read a line of output from the server. If there are multiple output
3826 3826 pipes, reads only the main pipe.
3827 3827
3828 3828 ereadline
3829 3829 ---------
3830 3830
3831 3831 Like ``readline``, but read from the stderr pipe, if available.
3832 3832
3833 3833 read <X>
3834 3834 --------
3835 3835
3836 3836 ``read()`` X bytes from the server's main output pipe.
3837 3837
3838 3838 eread <X>
3839 3839 ---------
3840 3840
3841 3841 ``read()`` X bytes from the server's stderr pipe, if available.
3842 3842
3843 3843 Specifying Unified Frame-Based Protocol Frames
3844 3844 ----------------------------------------------
3845 3845
3846 3846 It is possible to emit *Unified Frame-Based Protocol* frames by using
3847 3847 special syntax.
3848 3848
3849 3849 A frame is composed of a type, flags, and a payload. These can be parsed
3850 3850 from a string of the form::
3851 3851
3852 3852 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3853 3853
3854 3854 ``request-id`` and ``stream-id`` are integers defining the request and
3855 3855 stream identifiers.
3856 3856
3857 3857 ``type`` can be an integer value for the frame type or the string name
3858 3858 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3859 3859 ``command-name``.
3860 3860
3861 3861 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3862 3862 components. Each component (and there can be just one) can be an integer
3863 3863 or a flag name for stream flags or frame flags, respectively. Values are
3864 3864 resolved to integers and then bitwise OR'd together.
3865 3865
3866 3866 ``payload`` represents the raw frame payload. If it begins with
3867 3867 ``cbor:``, the following string is evaluated as Python code and the
3868 3868 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3869 3869 as a Python byte string literal.
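
For instance, a ``command-request`` frame carrying a CBOR-encoded request
for the ``heads`` command could be written as::

1 1 stream-begin command-request new cbor:{b'name': b'heads'}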
3870 3870 """
3871 3871 opts = pycompat.byteskwargs(opts)
3872 3872
3873 3873 if opts[b'localssh'] and not repo:
3874 3874 raise error.Abort(_(b'--localssh requires a repository'))
3875 3875
3876 3876 if opts[b'peer'] and opts[b'peer'] not in (
3877 3877 b'raw',
3878 3878 b'http2',
3879 3879 b'ssh1',
3880 3880 b'ssh2',
3881 3881 ):
3882 3882 raise error.Abort(
3883 3883 _(b'invalid value for --peer'),
3884 3884 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3885 3885 )
3886 3886
3887 3887 if path and opts[b'localssh']:
3888 3888 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3889 3889
3890 3890 if ui.interactive():
3891 3891 ui.write(_(b'(waiting for commands on stdin)\n'))
3892 3892
3893 3893 blocks = list(_parsewirelangblocks(ui.fin))
3894 3894
3895 3895 proc = None
3896 3896 stdin = None
3897 3897 stdout = None
3898 3898 stderr = None
3899 3899 opener = None
3900 3900
3901 3901 if opts[b'localssh']:
3902 3902 # We start the SSH server in its own process so there is process
3903 3903 # separation. This prevents a whole class of potential bugs around
3904 3904 # shared state from interfering with server operation.
3905 3905 args = procutil.hgcmd() + [
3906 3906 b'-R',
3907 3907 repo.root,
3908 3908 b'debugserve',
3909 3909 b'--sshstdio',
3910 3910 ]
3911 3911 proc = subprocess.Popen(
3912 3912 pycompat.rapply(procutil.tonativestr, args),
3913 3913 stdin=subprocess.PIPE,
3914 3914 stdout=subprocess.PIPE,
3915 3915 stderr=subprocess.PIPE,
3916 3916 bufsize=0,
3917 3917 )
3918 3918
3919 3919 stdin = proc.stdin
3920 3920 stdout = proc.stdout
3921 3921 stderr = proc.stderr
3922 3922
3923 3923 # We turn the pipes into observers so we can log I/O.
3924 3924 if ui.verbose or opts[b'peer'] == b'raw':
3925 3925 stdin = util.makeloggingfileobject(
3926 3926 ui, proc.stdin, b'i', logdata=True
3927 3927 )
3928 3928 stdout = util.makeloggingfileobject(
3929 3929 ui, proc.stdout, b'o', logdata=True
3930 3930 )
3931 3931 stderr = util.makeloggingfileobject(
3932 3932 ui, proc.stderr, b'e', logdata=True
3933 3933 )
3934 3934
3935 3935 # --localssh also implies the peer connection settings.
3936 3936
3937 3937 url = b'ssh://localserver'
3938 3938 autoreadstderr = not opts[b'noreadstderr']
3939 3939
3940 3940 if opts[b'peer'] == b'ssh1':
3941 3941 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3942 3942 peer = sshpeer.sshv1peer(
3943 3943 ui,
3944 3944 url,
3945 3945 proc,
3946 3946 stdin,
3947 3947 stdout,
3948 3948 stderr,
3949 3949 None,
3950 3950 autoreadstderr=autoreadstderr,
3951 3951 )
3952 3952 elif opts[b'peer'] == b'ssh2':
3953 3953 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3954 3954 peer = sshpeer.sshv2peer(
3955 3955 ui,
3956 3956 url,
3957 3957 proc,
3958 3958 stdin,
3959 3959 stdout,
3960 3960 stderr,
3961 3961 None,
3962 3962 autoreadstderr=autoreadstderr,
3963 3963 )
3964 3964 elif opts[b'peer'] == b'raw':
3965 3965 ui.write(_(b'using raw connection to peer\n'))
3966 3966 peer = None
3967 3967 else:
3968 3968 ui.write(_(b'creating ssh peer from handshake results\n'))
3969 3969 peer = sshpeer.makepeer(
3970 3970 ui,
3971 3971 url,
3972 3972 proc,
3973 3973 stdin,
3974 3974 stdout,
3975 3975 stderr,
3976 3976 autoreadstderr=autoreadstderr,
3977 3977 )
3978 3978
3979 3979 elif path:
3980 3980 # We bypass hg.peer() so we can proxy the sockets.
3981 3981 # TODO consider not doing this because we skip
3982 3982 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3983 3983 u = util.url(path)
3984 3984 if u.scheme != b'http':
3985 3985 raise error.Abort(_(b'only http:// paths are currently supported'))
3986 3986
3987 3987 url, authinfo = u.authinfo()
3988 3988 openerargs = {
3989 3989 'useragent': b'Mercurial debugwireproto',
3990 3990 }
3991 3991
3992 3992 # Turn pipes/sockets into observers so we can log I/O.
3993 3993 if ui.verbose:
3994 3994 openerargs.update(
3995 3995 {
3996 3996 'loggingfh': ui,
3997 3997 'loggingname': b's',
3998 3998 'loggingopts': {'logdata': True, 'logdataapis': False,},
3999 3999 }
4000 4000 )
4001 4001
4002 4002 if ui.debugflag:
4003 4003 openerargs['loggingopts']['logdataapis'] = True
4004 4004
4005 4005 # Don't send default headers when in raw mode. This allows us to
4006 4006 # bypass most of the behavior of our URL handling code so we can
4007 4007 # have near complete control over what's sent on the wire.
4008 4008 if opts[b'peer'] == b'raw':
4009 4009 openerargs['sendaccept'] = False
4010 4010
4011 4011 opener = urlmod.opener(ui, authinfo, **openerargs)
4012 4012
4013 4013 if opts[b'peer'] == b'http2':
4014 4014 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4015 4015 # We go through makepeer() because we need an API descriptor for
4016 4016 # the peer instance to be useful.
4017 4017 with ui.configoverride(
4018 4018 {(b'experimental', b'httppeer.advertise-v2'): True}
4019 4019 ):
4020 4020 if opts[b'nologhandshake']:
4021 4021 ui.pushbuffer()
4022 4022
4023 4023 peer = httppeer.makepeer(ui, path, opener=opener)
4024 4024
4025 4025 if opts[b'nologhandshake']:
4026 4026 ui.popbuffer()
4027 4027
4028 4028 if not isinstance(peer, httppeer.httpv2peer):
4029 4029 raise error.Abort(
4030 4030 _(
4031 4031 b'could not instantiate HTTP peer for '
4032 4032 b'wire protocol version 2'
4033 4033 ),
4034 4034 hint=_(
4035 4035 b'the server may not have the feature '
4036 4036 b'enabled or is not allowing this '
4037 4037 b'client version'
4038 4038 ),
4039 4039 )
4040 4040
4041 4041 elif opts[b'peer'] == b'raw':
4042 4042 ui.write(_(b'using raw connection to peer\n'))
4043 4043 peer = None
4044 4044 elif opts[b'peer']:
4045 4045 raise error.Abort(
4046 4046 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4047 4047 )
4048 4048 else:
4049 4049 peer = httppeer.makepeer(ui, path, opener=opener)
4050 4050
4051 4051 # We /could/ populate stdin/stdout with sock.makefile()...
4052 4052 else:
4053 4053 raise error.Abort(_(b'unsupported connection configuration'))
4054 4054
4055 4055 batchedcommands = None
4056 4056
4057 4057 # Now perform actions based on the parsed wire language instructions.
4058 4058 for action, lines in blocks:
4059 4059 if action in (b'raw', b'raw+'):
4060 4060 if not stdin:
4061 4061 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4062 4062
4063 4063 # Concatenate the data together.
4064 4064 data = b''.join(l.lstrip() for l in lines)
4065 4065 data = stringutil.unescapestr(data)
4066 4066 stdin.write(data)
4067 4067
4068 4068 if action == b'raw+':
4069 4069 stdin.flush()
4070 4070 elif action == b'flush':
4071 4071 if not stdin:
4072 4072 raise error.Abort(_(b'cannot call flush on this peer'))
4073 4073 stdin.flush()
4074 4074 elif action.startswith(b'command'):
4075 4075 if not peer:
4076 4076 raise error.Abort(
4077 4077 _(
4078 4078 b'cannot send commands unless peer instance '
4079 4079 b'is available'
4080 4080 )
4081 4081 )
4082 4082
4083 4083 command = action.split(b' ', 1)[1]
4084 4084
4085 4085 args = {}
4086 4086 for line in lines:
4087 4087 # We need to allow empty values.
4088 4088 fields = line.lstrip().split(b' ', 1)
4089 4089 if len(fields) == 1:
4090 4090 key = fields[0]
4091 4091 value = b''
4092 4092 else:
4093 4093 key, value = fields
4094 4094
4095 4095 if value.startswith(b'eval:'):
4096 4096 value = stringutil.evalpythonliteral(value[5:])
4097 4097 else:
4098 4098 value = stringutil.unescapestr(value)
4099 4099
4100 4100 args[key] = value
4101 4101
4102 4102 if batchedcommands is not None:
4103 4103 batchedcommands.append((command, args))
4104 4104 continue
4105 4105
4106 4106 ui.status(_(b'sending %s command\n') % command)
4107 4107
4108 4108 if b'PUSHFILE' in args:
4109 4109 with open(args[b'PUSHFILE'], 'rb') as fh:
4110 4110 del args[b'PUSHFILE']
4111 4111 res, output = peer._callpush(
4112 4112 command, fh, **pycompat.strkwargs(args)
4113 4113 )
4114 4114 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4115 4115 ui.status(
4116 4116 _(b'remote output: %s\n') % stringutil.escapestr(output)
4117 4117 )
4118 4118 else:
4119 4119 with peer.commandexecutor() as e:
4120 4120 res = e.callcommand(command, args).result()
4121 4121
4122 4122 if isinstance(res, wireprotov2peer.commandresponse):
4123 4123 val = res.objects()
4124 4124 ui.status(
4125 4125 _(b'response: %s\n')
4126 4126 % stringutil.pprint(val, bprefix=True, indent=2)
4127 4127 )
4128 4128 else:
4129 4129 ui.status(
4130 4130 _(b'response: %s\n')
4131 4131 % stringutil.pprint(res, bprefix=True, indent=2)
4132 4132 )
4133 4133
4134 4134 elif action == b'batchbegin':
4135 4135 if batchedcommands is not None:
4136 4136 raise error.Abort(_(b'nested batchbegin not allowed'))
4137 4137
4138 4138 batchedcommands = []
4139 4139 elif action == b'batchsubmit':
4140 4140 # There is a batching API we could go through. But it would be
4141 4141 # difficult to normalize requests into function calls. It is easier
4142 4142 # to bypass this layer and normalize to commands + args.
4143 4143 ui.status(
4144 4144 _(b'sending batch with %d sub-commands\n')
4145 4145 % len(batchedcommands)
4146 4146 )
4147 4147 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4148 4148 ui.status(
4149 4149 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4150 4150 )
4151 4151
4152 4152 batchedcommands = None
4153 4153
4154 4154 elif action.startswith(b'httprequest '):
4155 4155 if not opener:
4156 4156 raise error.Abort(
4157 4157 _(b'cannot use httprequest without an HTTP peer')
4158 4158 )
4159 4159
4160 4160 request = action.split(b' ', 2)
4161 4161 if len(request) != 3:
4162 4162 raise error.Abort(
4163 4163 _(
4164 4164 b'invalid httprequest: expected format is '
4165 4165 b'"httprequest <method> <path>'
4166 4166 )
4167 4167 )
4168 4168
4169 4169 method, httppath = request[1:]
4170 4170 headers = {}
4171 4171 body = None
4172 4172 frames = []
4173 4173 for line in lines:
4174 4174 line = line.lstrip()
4175 4175 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4176 4176 if m:
4177 4177 # Headers need to use native strings.
4178 4178 key = pycompat.strurl(m.group(1))
4179 4179 value = pycompat.strurl(m.group(2))
4180 4180 headers[key] = value
4181 4181 continue
4182 4182
4183 4183 if line.startswith(b'BODYFILE '):
4184 4184 with open(line.split(b' ', 1)[1], b'rb') as fh:
4185 4185 body = fh.read()
4186 4186 elif line.startswith(b'frame '):
4187 4187 frame = wireprotoframing.makeframefromhumanstring(
4188 4188 line[len(b'frame ') :]
4189 4189 )
4190 4190
4191 4191 frames.append(frame)
4192 4192 else:
4193 4193 raise error.Abort(
4194 4194 _(b'unknown argument to httprequest: %s') % line
4195 4195 )
4196 4196
4197 4197 url = path + httppath
4198 4198
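# Any frames given take precedence: they are concatenated in order and
# used as the request body (overriding BODYFILE content read above).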
4199 4199 if frames:
4200 4200 body = b''.join(bytes(f) for f in frames)
4201 4201
4202 4202 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4203 4203
4204 4204 # urllib.Request insists on using has_data() as a proxy for
4205 4205 # determining the request method. Override that to use our
4206 4206 # explicitly requested method.
4207 4207 req.get_method = lambda: pycompat.sysstr(method)
4208 4208
4209 4209 try:
4210 4210 res = opener.open(req)
4211 4211 body = res.read()
4212 4212 except util.urlerr.urlerror as e:
4213 4213 # read() method must be called, but only exists in Python 2
4214 4214 getattr(e, 'read', lambda: None)()
4215 4215 continue
4216 4216
4217 4217 ct = res.headers.get('Content-Type')
4218 4218 if ct == 'application/mercurial-cbor':
4219 4219 ui.write(
4220 4220 _(b'cbor> %s\n')
4221 4221 % stringutil.pprint(
4222 4222 cborutil.decodeall(body), bprefix=True, indent=2
4223 4223 )
4224 4224 )
4225 4225
4226 4226 elif action == b'close':
4227 4227 peer.close()
4228 4228 elif action == b'readavailable':
4229 4229 if not stdout or not stderr:
4230 4230 raise error.Abort(
4231 4231 _(b'readavailable not available on this peer')
4232 4232 )
4233 4233
4234 4234 stdin.close()
4235 4235 stdout.read()
4236 4236 stderr.read()
4237 4237
4238 4238 elif action == b'readline':
4239 4239 if not stdout:
4240 4240 raise error.Abort(_(b'readline not available on this peer'))
4241 4241 stdout.readline()
4242 4242 elif action == b'ereadline':
4243 4243 if not stderr:
4244 4244 raise error.Abort(_(b'ereadline not available on this peer'))
4245 4245 stderr.readline()
4246 4246 elif action.startswith(b'read '):
4247 4247 count = int(action.split(b' ', 1)[1])
4248 4248 if not stdout:
4249 4249 raise error.Abort(_(b'read not available on this peer'))
4250 4250 stdout.read(count)
4251 4251 elif action.startswith(b'eread '):
4252 4252 count = int(action.split(b' ', 1)[1])
4253 4253 if not stderr:
4254 4254 raise error.Abort(_(b'eread not available on this peer'))
4255 4255 stderr.read(count)
4256 4256 else:
4257 4257 raise error.Abort(_(b'unknown action: %s') % action)
4258 4258
4259 4259 if batchedcommands is not None:
4260 4260 raise error.Abort(_(b'unclosed "batchbegin" request'))
4261 4261
4262 4262 if peer:
4263 4263 peer.close()
4264 4264
4265 4265 if proc:
4266 4266 proc.kill()