##// END OF EJS Templates
debugcommands: finish moving `extendeddateformats` from util to dateutil...
Matt Harbison -
r44330:38d6aa76 default
parent child Browse files
Show More
@@ -1,4285 +1,4285 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 encoding,
48 48 error,
49 49 exchange,
50 50 extensions,
51 51 filemerge,
52 52 filesetlang,
53 53 formatter,
54 54 hg,
55 55 httppeer,
56 56 localrepo,
57 57 lock as lockmod,
58 58 logcmdutil,
59 59 merge as mergemod,
60 60 obsolete,
61 61 obsutil,
62 62 pathutil,
63 63 phases,
64 64 policy,
65 65 pvec,
66 66 pycompat,
67 67 registrar,
68 68 repair,
69 69 revlog,
70 70 revset,
71 71 revsetlang,
72 72 scmutil,
73 73 setdiscovery,
74 74 simplemerge,
75 75 sshpeer,
76 76 sslutil,
77 77 streamclone,
78 78 templater,
79 79 treediscovery,
80 80 upgrade,
81 81 url as urlmod,
82 82 util,
83 83 vfs as vfsmod,
84 84 wireprotoframing,
85 85 wireprotoserver,
86 86 wireprotov2peer,
87 87 )
88 88 from .utils import (
89 89 cborutil,
90 90 compression,
91 91 dateutil,
92 92 procutil,
93 93 stringutil,
94 94 )
95 95
96 96 from .revlogutils import deltas as deltautil
97 97
# convenience alias so debug commands can release locks directly
release = lockmod.release

# registrar collecting every @command-decorated debug* command below
command = registrar.command()
101 101
102 102
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Pick the revlog to query and a node-lookup function: either an
    # explicit on-disk index (three args) or the current repo's
    # changelog (two args).
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancnode), hex(ancnode)))
122 122
123 123
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle from disk and replay it into the local repository
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
130 130
131 131
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
    otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
    node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense against an empty repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only counts 'n' node events so progress
    # reporting below has an accurate total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    # second parse pass: actually create the commits, all inside a
    # single transaction
    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently created node (-1 = none yet)
        atbranch = b'default'  # branch applied to subsequent nodes
        nodeids = []  # node hash per created rev, indexed by id
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: run a real 3-way merge of the file so the
                        # result contains mergeable changes
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's own lines so each rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry over the second parent's nf* files so the
                        # merge does not delete them
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate backref ids into the node hashes recorded in
                # nodeids; negative / missing refs mean null parents
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out after commit
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch change event for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
307 307
308 308
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup 'gen'

    With 'all', every delta in every section (changelog, manifest,
    filelogs) is listed with its full metadata; otherwise only the node
    hashes are printed.  'indent' prefixes each output line (used when
    nested inside bundle2 output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one line per delta in the current section
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # each *header() call advances the stream to the next section;
        # the returned header dict is only needed for filelog names
        chunkdata = gen.changelogheader()
        showchunks(b"changelog")
        chunkdata = gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: only changelog node hashes
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
348 348
349 349
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # unparsable marker stream: report the version and give up
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(blob))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(blob)))
        fm = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, obsutil.marker(None, raw))
        fm.end()
372 372
373 373
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data'

    'data' is a binary phase-heads blob (as carried in a bundle2
    'phase-heads' part).  One '<node-hex> <phasename>' line is written
    per head, prefixed by 'indent' spaces.
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
382 382
383 383
def _quasirepr(thing):
    """Render 'thing' as bytes; dict-likes get a sorted, stable form."""
    # sorting the keys keeps the output deterministic across runs
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
390 390
391 391
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when given
        if wanted and part.type not in wanted:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
414 414
415 415
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, not the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
438 438
439 439
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    # bundle2 capabilities are nested: one key, then its values
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)
456 456
457 457
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of both
    dirstate parents and warns about each inconsistency; aborts at the
    end if any were found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # pass 1: every tracked file must be consistent with the manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # pass 2: every manifest1 file must be tracked with a sane state
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            # fix: terminate the warning with a newline like every other
            # message emitted by this command
            ui.warn(
                _(b"%s in manifest1, but listed as state %s\n") % (f, state)
            )
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
486 486
487 487
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
500 500
501 501
def _debugdisplaycolor(ui):
    """Print every known color/effect name, each rendered in its own style."""
    # work on a copy so the caller's ui style table is left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, _value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[6:]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(
        ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1])
    )
    for colorname, label in entries:
        ui.write(b'%s\n' % colorname, label=label)
518 518
519 519
def _debugdisplaystyle(ui):
    """Print each configured style name and the effects it expands to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest name so the effect columns line up
    longest = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
533 533
534 534
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
556 556
557 557
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index on disk
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            # 'n' events carry (rev, [non-null parent revs]); 'l' events
            # label the revisions explicitly requested on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its tag names for labeling below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' annotation whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
627 627
628 628
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the single positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
644 644
645 645
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # extendeddateformats now lives in dateutil (finishing its move
        # from util); the stale util reference is gone
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
664 664
665 665
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used below: e[1] = compressed size,
        # e[2] = uncompressed size, e[3] = delta base rev, and (per the
        # p1/p2 labels assigned here) e[5]/e[6] = parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base may be any rev; classify it
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the
            # previous rev (or the rev itself is a full snapshot)
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # total compressed size of every rev in this rev's delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains are numbered in order of first appearance of their base rev
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous rev
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain to measure how much
            # disk data would actually be touched
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
846 846
847 847
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; honor it too
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        # dirstate entry fields used here: ent[0] = state char,
        # ent[1] = recorded mode, ent[2] = size, ent[3] = mtime
        # (-1 when the mtime is unset)
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the recorded mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891 891
892 892
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # pick the discovery implementation: legacy tree-walking discovery
    # (--old) or the modern set-based discovery; both closures return
    # (common nodes, remote heads)
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to its heads, as the new discovery does
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                # --rev limits discovery to the ancestors of these revs
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    # time the discovery run itself
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
    ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
997 997
998 998
# Copy-buffer size (4 KiB) used by debugdownload when streaming data.
_chunksize = 4 << 10
1000 1000
1001 1001
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's url layer (honoring proxy
    and auth configuration) and copied in ``_chunksize`` pieces either to
    the file named by --output or, by default, to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always close the response handle (previously leaked), and the
        # output file when we opened one, even if the copy loop fails
        fh.close()
        if output:
            dest.close()
1021 1021
1022 1022
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for deterministic output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen build: the module lives inside the executable itself
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with a testing-status tag
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1084 1084
1085 1085
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset expression goes through these transformation stages, in
    # order; any of them can be dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the header line is omitted in the legacy --verbose-only mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect candidate file names to run the matcher against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1181 1181
1182 1182
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: the longest variant name, but at least as
    # wide as the b'format-variant' header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so that all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; strings pass through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so the output can be colorized according to whether
        # the repo agrees with the config and with Mercurial's default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1253 1253
1254 1254
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a filesystem capability probe as b'yes'/b'no'
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; leave b'(unknown)' if
    # the directory is not writable
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1277 1277
1278 1278
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    args = {'bundlecaps': None}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **args)

    # map user-facing compression names to on-disk bundle type markers
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1325 1325
1326 1326
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file is ignored either directly, or because one of its
                # parent directories is ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # point the user at the exact rule that matched
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1375 1375
1376 1376
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    shortfn = hex if ui.debugflag else short

    # column width for node ids: measure the first entry, defaulting to 12
    # for an empty store
    idlen = 12
    probe = next(iter(store), None)
    if probe is not None:
        idlen = len(shortfn(store.node(probe)))

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1416 1416
1417 1417
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # emit one edge per parent; the second parent only exists for merges
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1436 1436
1437 1437
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index so its internal state is populated before querying
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1447 1447
1448 1448
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # number of problems found so far; also the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        _(b" %s\n (check that your locale is properly set)\n"),
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen build: the stdlib is embedded in the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # import the accelerated extension modules to verify they load
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        m = templater.templatepath(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    # p was cleared above if the default template is missing or broken
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # b'vi' is the fallback default, so a missing b'vi' means "no editor
    # configured" rather than "configured editor missing"
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1728 1728
1729 1729
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # render one digit per queried node, in query order
    digits = [b"1" if known else b"0" for known in flags]
    ui.write(b"%s\n" % b"".join(digits))
1743 1743
1744 1744
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # simply forward to the modern replacement command
    debugnamecomplete(ui, repo, *args)
1749 1749
1750 1750
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forcibly remove lock files on request (dangerous: there is no check
    # that the owning process is actually gone)
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # NOTE(review): use the imported module explicitly; a bare
        # ``release`` name is not visible in this module's import header
        # and would otherwise risk a NameError here
        lockmod.release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1862 1862
1863 1863
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # abort cleanly when the active revlog backend has no such cache
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    # default action: dump the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
1935 1935
1936 1936
@command(b'debugmergestate', [], b'')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # Render the all-zeros hash as the literal 'null' for readability.
    def _hashornull(h):
        if h == nullhex:
            return b'null'
        else:
            return h

    # Pretty-print either the v1 or the v2 record list (closed over below).
    def printrecords(version):
        ui.writenoi18n(b'* version %d records\n' % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == b'L':
                ui.writenoi18n(b'local: %s\n' % record)
            elif rtype == b'O':
                ui.writenoi18n(b'other: %s\n' % record)
            elif rtype == b'm':
                # merge driver record: driver name and its state, separated
                # by a NUL byte.
                driver, mdstate = record.split(b'\0', 1)
                ui.writenoi18n(
                    b'merge driver: %s (state "%s")\n' % (driver, mdstate)
                )
            elif rtype in b'FDC':
                # per-file merge records; fields are NUL-separated. v1 lacks
                # the "other node" field, so it is synthesized below.
                r = record.split(b'\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = b'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.writenoi18n(
                    b'file: %s (record type "%s", state "%s", hash %s)\n'
                    % (f, rtype, state, _hashornull(hash))
                )
                ui.writenoi18n(
                    b'  local path: %s (flags "%s")\n' % (lfile, flags)
                )
                ui.writenoi18n(
                    b'  ancestor path: %s (node %s)\n'
                    % (afile, _hashornull(anode))
                )
                ui.writenoi18n(
                    b'  other path: %s (node %s)\n'
                    % (ofile, _hashornull(onode))
                )
            elif rtype == b'f':
                # file extras record: filename, then alternating key/value
                # pairs, all NUL-separated.
                filename, rawextras = record.split(b'\0', 1)
                extras = rawextras.split(b'\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.writenoi18n(
                    b'file extras: %s (%s)\n'
                    % (filename, b', '.join(extrastrings))
                )
            elif rtype == b'l':
                # labels record: local/other and an optional base label.
                labels = record.split(b'\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.writenoi18n(b'labels:\n')
                ui.write((b'  local: %s\n' % labels[0]))
                ui.write((b'  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((b'  base: %s\n' % labels[2]))
            else:
                ui.writenoi18n(
                    b'unrecognized entry: %s\t%s\n'
                    % (rtype, record.replace(b'\0', b'\t'))
                )

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = b'LOml'

    # Known record types (in 'order') sort first, in that order; everything
    # else sorts after, by record payload.
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)

    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.writenoi18n(b'no merge state found\n')
    elif not v2records:
        ui.notenoi18n(b'no version 2 merge state\n')
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.notenoi18n(b'v1 and v2 states match: using v2\n')
        printrecords(2)
    else:
        # Mismatch: prefer v1, but show v2 too under --verbose so the
        # discrepancy can be inspected.
        ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
        printrecords(1)
        if ui.verbose:
            printrecords(2)
2051 2051
2052 2052
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every candidate name. The 'branches' namespace is skipped
    # here and handled separately below so that only open branches are
    # offered as completions.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No arguments means "complete everything" (empty prefix).
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2075 2075
2076 2076
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    # Parse a full hex node id into binary, aborting on malformed input.
    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete INDEX...: remove markers from the obsstore by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting 'precursor' with the
        # given successors (possibly none, i.e. pruning).
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent information can only be recorded for changesets
                    # actually present in the (unfiltered) repository.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to those
        # relevant to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so iterate over
            # everything and filter the display down to 'markers'.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2224 2224
2225 2225
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Default to the working directory context when no --rev is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2238 2238
2239 2239
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # NOTE: this function was previously (mis)named ``debugp1copies``, which
    # rebound that module-level name and shadowed the actual p1 command
    # function defined earlier in this file. The registered command name
    # comes from the @command decorator and is unaffected by the rename.
    opts = pycompat.byteskwargs(opts)
    # Default to the working directory context when no --rev is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2252 2252
2253 2253
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Normalize the spec to an absolute path and reject anything that
        # falls outside the repository root.
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # The dirstate stores '/'-separated paths; on platforms with a
        # different separator, translate the spec before matching and
        # translate matches back afterwards.
        sepfix = pycompat.ossep != b'/'
        if sepfix:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        wantfull = opts['full']
        files = set()
        dirs = set()
        for f, st in pycompat.iteritems(repo.dirstate):
            if not f.startswith(spec) or st[0] not in acceptable:
                continue
            if sepfix:
                f = f.replace(b'/', pycompat.ossep)
            if wantfull:
                files.add(f)
                continue
            # Without --full, stop at the next path separator and offer
            # the directory prefix instead.
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter options;
    # with no filters, accept all of normal/modified/added/removed.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()

    allfiles = set()
    alldirs = set()
    for spec in specs or [b'.']:
        f, d = complete(spec, acceptable or b'nmar')
        allfiles.update(f)
        alldirs.update(d)
    allfiles.update(alldirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(allfiles)))
    ui.write(b'\n')
2322 2322
2323 2323
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then report each copy found between them,
    # restricted to the given file patterns (matched against rev1).
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2337 2337
2338 2338
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; its output only shows up when the
    # user also passes --debug.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    islocal = peer.local() is not None
    pushable = peer.canpush()

    ui.write(_(b'url: %s\n') % peer.url())
    if islocal:
        ui.write(_(b'local: %s\n') % _(b'yes'))
    else:
        ui.write(_(b'local: %s\n') % _(b'no'))
    ui.write(_(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no')))
2357 2357
2358 2358
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is emulated by overriding ui.forcemerge, the same mechanism
    # the real merge machinery uses.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report (with -v) the other inputs that influence tool selection.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, suppress _picktool's warning chatter by
                # buffering output; the buffer is always popped in finally.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2446 2446
2447 2447
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for key, value in sorted(pycompat.iteritems(peer.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n"
                % (stringutil.escapestr(key), stringutil.escapestr(value))
            )
        return

    # Update mode: attempt the old -> new transition for the given key.
    key, old, new = keyinfo
    with peer.commandexecutor() as executor:
        result = executor.callcommand(
            b'pushkey',
            {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            },
        ).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2479 2479
2480 2480
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector (pvec) data for two revisions

    Prints both pvecs, their depths, the delta/hamming distance between
    them, and their relation: '=' equal, '>'/'<' ordering, '|' conflict,
    '?' none of the above.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously 'rel' was left unassigned when none of the comparisons
        # above held, which made the final ui.write below raise a NameError
        # instead of printing the report.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2507 2507
2508 2508
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            # Files the manifest knows but the dirstate does not...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only files that are not marked as added.
            dsnotadded = {
                f for f in indirstate - inmanifest if ds[f] != b'a'
            }
            changedfiles = manifestonly | dsnotadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2556 2556
2557 2557
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual scan/rewrite logic lives in the repair module.
    repair.rebuildfncache(ui, repo)
2562 2562
2563 2563
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
            continue
        srcpath, srcnode = renamed
        ui.write(
            _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
        )
2583 2583
2584 2584
2585 2585 @command(
2586 2586 b'debugrevlog',
2587 2587 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2588 2588 _(b'-c|-m|FILE'),
2589 2589 optionalrepo=True,
2590 2590 )
2591 2591 def debugrevlog(ui, repo, file_=None, **opts):
2592 2592 """show data and statistics about a revlog"""
2593 2593 opts = pycompat.byteskwargs(opts)
2594 2594 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2595 2595
2596 2596 if opts.get(b"dump"):
2597 2597 numrevs = len(r)
2598 2598 ui.write(
2599 2599 (
2600 2600 b"# rev p1rev p2rev start end deltastart base p1 p2"
2601 2601 b" rawsize totalsize compression heads chainlen\n"
2602 2602 )
2603 2603 )
2604 2604 ts = 0
2605 2605 heads = set()
2606 2606
2607 2607 for rev in pycompat.xrange(numrevs):
2608 2608 dbase = r.deltaparent(rev)
2609 2609 if dbase == -1:
2610 2610 dbase = rev
2611 2611 cbase = r.chainbase(rev)
2612 2612 clen = r.chainlen(rev)
2613 2613 p1, p2 = r.parentrevs(rev)
2614 2614 rs = r.rawsize(rev)
2615 2615 ts = ts + rs
2616 2616 heads -= set(r.parentrevs(rev))
2617 2617 heads.add(rev)
2618 2618 try:
2619 2619 compression = ts / r.end(rev)
2620 2620 except ZeroDivisionError:
2621 2621 compression = 0
2622 2622 ui.write(
2623 2623 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2624 2624 b"%11d %5d %8d\n"
2625 2625 % (
2626 2626 rev,
2627 2627 p1,
2628 2628 p2,
2629 2629 r.start(rev),
2630 2630 r.end(rev),
2631 2631 r.start(dbase),
2632 2632 r.start(cbase),
2633 2633 r.start(p1),
2634 2634 r.start(p2),
2635 2635 rs,
2636 2636 ts,
2637 2637 compression,
2638 2638 len(heads),
2639 2639 clen,
2640 2640 )
2641 2641 )
2642 2642 return 0
2643 2643
2644 2644 v = r.version
2645 2645 format = v & 0xFFFF
2646 2646 flags = []
2647 2647 gdelta = False
2648 2648 if v & revlog.FLAG_INLINE_DATA:
2649 2649 flags.append(b'inline')
2650 2650 if v & revlog.FLAG_GENERALDELTA:
2651 2651 gdelta = True
2652 2652 flags.append(b'generaldelta')
2653 2653 if not flags:
2654 2654 flags = [b'(none)']
2655 2655
2656 2656 ### tracks merge vs single parent
2657 2657 nummerges = 0
2658 2658
2659 2659 ### tracks ways the "delta" are build
2660 2660 # nodelta
2661 2661 numempty = 0
2662 2662 numemptytext = 0
2663 2663 numemptydelta = 0
2664 2664 # full file content
2665 2665 numfull = 0
2666 2666 # intermediate snapshot against a prior snapshot
2667 2667 numsemi = 0
2668 2668 # snapshot count per depth
2669 2669 numsnapdepth = collections.defaultdict(lambda: 0)
2670 2670 # delta against previous revision
2671 2671 numprev = 0
2672 2672 # delta against first or second parent (not prev)
2673 2673 nump1 = 0
2674 2674 nump2 = 0
2675 2675 # delta against neither prev nor parents
2676 2676 numother = 0
2677 2677 # delta against prev that are also first or second parent
2678 2678 # (details of `numprev`)
2679 2679 nump1prev = 0
2680 2680 nump2prev = 0
2681 2681
2682 2682 # data about delta chain of each revs
2683 2683 chainlengths = []
2684 2684 chainbases = []
2685 2685 chainspans = []
2686 2686
2687 2687 # data about each revision
2688 2688 datasize = [None, 0, 0]
2689 2689 fullsize = [None, 0, 0]
2690 2690 semisize = [None, 0, 0]
2691 2691 # snapshot count per depth
2692 2692 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2693 2693 deltasize = [None, 0, 0]
2694 2694 chunktypecounts = {}
2695 2695 chunktypesizes = {}
2696 2696
2697 2697 def addsize(size, l):
2698 2698 if l[0] is None or size < l[0]:
2699 2699 l[0] = size
2700 2700 if size > l[1]:
2701 2701 l[1] = size
2702 2702 l[2] += size
2703 2703
2704 2704 numrevs = len(r)
2705 2705 for rev in pycompat.xrange(numrevs):
2706 2706 p1, p2 = r.parentrevs(rev)
2707 2707 delta = r.deltaparent(rev)
2708 2708 if format > 0:
2709 2709 addsize(r.rawsize(rev), datasize)
2710 2710 if p2 != nullrev:
2711 2711 nummerges += 1
2712 2712 size = r.length(rev)
2713 2713 if delta == nullrev:
2714 2714 chainlengths.append(0)
2715 2715 chainbases.append(r.start(rev))
2716 2716 chainspans.append(size)
2717 2717 if size == 0:
2718 2718 numempty += 1
2719 2719 numemptytext += 1
2720 2720 else:
2721 2721 numfull += 1
2722 2722 numsnapdepth[0] += 1
2723 2723 addsize(size, fullsize)
2724 2724 addsize(size, snapsizedepth[0])
2725 2725 else:
2726 2726 chainlengths.append(chainlengths[delta] + 1)
2727 2727 baseaddr = chainbases[delta]
2728 2728 revaddr = r.start(rev)
2729 2729 chainbases.append(baseaddr)
2730 2730 chainspans.append((revaddr - baseaddr) + size)
2731 2731 if size == 0:
2732 2732 numempty += 1
2733 2733 numemptydelta += 1
2734 2734 elif r.issnapshot(rev):
2735 2735 addsize(size, semisize)
2736 2736 numsemi += 1
2737 2737 depth = r.snapshotdepth(rev)
2738 2738 numsnapdepth[depth] += 1
2739 2739 addsize(size, snapsizedepth[depth])
2740 2740 else:
2741 2741 addsize(size, deltasize)
2742 2742 if delta == rev - 1:
2743 2743 numprev += 1
2744 2744 if delta == p1:
2745 2745 nump1prev += 1
2746 2746 elif delta == p2:
2747 2747 nump2prev += 1
2748 2748 elif delta == p1:
2749 2749 nump1 += 1
2750 2750 elif delta == p2:
2751 2751 nump2 += 1
2752 2752 elif delta != nullrev:
2753 2753 numother += 1
2754 2754
2755 2755 # Obtain data on the raw chunks in the revlog.
2756 2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2757 2757 segment = r._getsegmentforrevs(rev, rev)[1]
2758 2758 else:
2759 2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2760 2760 if segment:
2761 2761 chunktype = bytes(segment[0:1])
2762 2762 else:
2763 2763 chunktype = b'empty'
2764 2764
2765 2765 if chunktype not in chunktypecounts:
2766 2766 chunktypecounts[chunktype] = 0
2767 2767 chunktypesizes[chunktype] = 0
2768 2768
2769 2769 chunktypecounts[chunktype] += 1
2770 2770 chunktypesizes[chunktype] += size
2771 2771
2772 2772 # Adjust size min value for empty cases
2773 2773 for size in (datasize, fullsize, semisize, deltasize):
2774 2774 if size[0] is None:
2775 2775 size[0] = 0
2776 2776
2777 2777 numdeltas = numrevs - numfull - numempty - numsemi
2778 2778 numoprev = numprev - nump1prev - nump2prev
2779 2779 totalrawsize = datasize[2]
2780 2780 datasize[2] /= numrevs
2781 2781 fulltotal = fullsize[2]
2782 2782 if numfull == 0:
2783 2783 fullsize[2] = 0
2784 2784 else:
2785 2785 fullsize[2] /= numfull
2786 2786 semitotal = semisize[2]
2787 2787 snaptotal = {}
2788 2788 if numsemi > 0:
2789 2789 semisize[2] /= numsemi
2790 2790 for depth in snapsizedepth:
2791 2791 snaptotal[depth] = snapsizedepth[depth][2]
2792 2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2793 2793
2794 2794 deltatotal = deltasize[2]
2795 2795 if numdeltas > 0:
2796 2796 deltasize[2] /= numdeltas
2797 2797 totalsize = fulltotal + semitotal + deltatotal
2798 2798 avgchainlen = sum(chainlengths) / numrevs
2799 2799 maxchainlen = max(chainlengths)
2800 2800 maxchainspan = max(chainspans)
2801 2801 compratio = 1
2802 2802 if totalsize:
2803 2803 compratio = totalrawsize / totalsize
2804 2804
2805 2805 basedfmtstr = b'%%%dd\n'
2806 2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2807 2807
2808 2808 def dfmtstr(max):
2809 2809 return basedfmtstr % len(str(max))
2810 2810
2811 2811 def pcfmtstr(max, padding=0):
2812 2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2813 2813
2814 2814 def pcfmt(value, total):
2815 2815 if total:
2816 2816 return (value, 100 * float(value) / total)
2817 2817 else:
2818 2818 return value, 100.0
2819 2819
2820 2820 ui.writenoi18n(b'format : %d\n' % format)
2821 2821 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2822 2822
2823 2823 ui.write(b'\n')
2824 2824 fmt = pcfmtstr(totalsize)
2825 2825 fmt2 = dfmtstr(totalsize)
2826 2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2827 2827 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2828 2828 ui.writenoi18n(
2829 2829 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2830 2830 )
2831 2831 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2832 2832 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2833 2833 ui.writenoi18n(
2834 2834 b' text : '
2835 2835 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2836 2836 )
2837 2837 ui.writenoi18n(
2838 2838 b' delta : '
2839 2839 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2840 2840 )
2841 2841 ui.writenoi18n(
2842 2842 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2843 2843 )
2844 2844 for depth in sorted(numsnapdepth):
2845 2845 ui.write(
2846 2846 (b' lvl-%-3d : ' % depth)
2847 2847 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2848 2848 )
2849 2849 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2850 2850 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2851 2851 ui.writenoi18n(
2852 2852 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2853 2853 )
2854 2854 for depth in sorted(numsnapdepth):
2855 2855 ui.write(
2856 2856 (b' lvl-%-3d : ' % depth)
2857 2857 + fmt % pcfmt(snaptotal[depth], totalsize)
2858 2858 )
2859 2859 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2860 2860
2861 2861 def fmtchunktype(chunktype):
2862 2862 if chunktype == b'empty':
2863 2863 return b' %s : ' % chunktype
2864 2864 elif chunktype in pycompat.bytestr(string.ascii_letters):
2865 2865 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2866 2866 else:
2867 2867 return b' 0x%s : ' % hex(chunktype)
2868 2868
2869 2869 ui.write(b'\n')
2870 2870 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2871 2871 for chunktype in sorted(chunktypecounts):
2872 2872 ui.write(fmtchunktype(chunktype))
2873 2873 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2874 2874 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2875 2875 for chunktype in sorted(chunktypecounts):
2876 2876 ui.write(fmtchunktype(chunktype))
2877 2877 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2878 2878
2879 2879 ui.write(b'\n')
2880 2880 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2881 2881 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2882 2882 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2883 2883 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2884 2884 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2885 2885
2886 2886 if format > 0:
2887 2887 ui.write(b'\n')
2888 2888 ui.writenoi18n(
2889 2889 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2890 2890 % tuple(datasize)
2891 2891 )
2892 2892 ui.writenoi18n(
2893 2893 b'full revision size (min/max/avg) : %d / %d / %d\n'
2894 2894 % tuple(fullsize)
2895 2895 )
2896 2896 ui.writenoi18n(
2897 2897 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2898 2898 % tuple(semisize)
2899 2899 )
2900 2900 for depth in sorted(snapsizedepth):
2901 2901 if depth == 0:
2902 2902 continue
2903 2903 ui.writenoi18n(
2904 2904 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2905 2905 % ((depth,) + tuple(snapsizedepth[depth]))
2906 2906 )
2907 2907 ui.writenoi18n(
2908 2908 b'delta size (min/max/avg) : %d / %d / %d\n'
2909 2909 % tuple(deltasize)
2910 2910 )
2911 2911
2912 2912 if numdeltas > 0:
2913 2913 ui.write(b'\n')
2914 2914 fmt = pcfmtstr(numdeltas)
2915 2915 fmt2 = pcfmtstr(numdeltas, 4)
2916 2916 ui.writenoi18n(
2917 2917 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2918 2918 )
2919 2919 if numprev > 0:
2920 2920 ui.writenoi18n(
2921 2921 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2922 2922 )
2923 2923 ui.writenoi18n(
2924 2924 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2925 2925 )
2926 2926 ui.writenoi18n(
2927 2927 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2928 2928 )
2929 2929 if gdelta:
2930 2930 ui.writenoi18n(
2931 2931 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2932 2932 )
2933 2933 ui.writenoi18n(
2934 2934 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2935 2935 )
2936 2936 ui.writenoi18n(
2937 2937 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2938 2938 )
2939 2939
2940 2940
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # Only the two historical index layouts can be rendered.
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full-length node hashes; otherwise the abbreviated form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # All ids rendered by shortfn have the same length, so sampling the
        # first entry is enough to size the columns.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit a column header matching the chosen format/verbosity combination;
    # the per-row output below must line up with it.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Best effort: fall back to null parents rather than aborting
                # the dump when parent lookup fails on damaged storage.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node ids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3054 3054
3055 3055
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered pipeline of (stage name, transform) applied to the parse tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage from the pipeline.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = set(n for n, f in stages)

    # Decide which stage trees to print: unconditionally (showalways) or
    # only when the tree changed from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering the tree at each stage so that
    # --verify-optimized can evaluate intermediate stages later.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree, then render
        # any difference between the resulting revision sequences in a
        # unified-diff-like form.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        # The optimized result differed: report failure per the docstring.
        return 1

    # Normal path: evaluate the (final) tree and print the result.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3187 3187
3188 3188
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # Optional file handle the server's I/O will be logged to.
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
    elif opts[b'logiofile']:
        # NOTE(review): buffering=1 (line buffering) is only honored for
        # text-mode files on Python 3; combined with binary b'ab' it likely
        # falls back to default buffering -- confirm this is acceptable.
        logfh = open(opts[b'logiofile'], b'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3234 3234
3235 3235
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions up front; the second parent defaults to null.
    p1node = scmutil.revsingle(repo, rev1).node()
    p2node = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the wlock is needed: this touches working-copy parents, not store.
    with repo.wlock():
        repo.setparents(p1node, p2node)
3253 3253
3254 3254
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: report this command's own name, not 'debugdata' (the
            # command this code was originally copied from).
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        # With -c/-m/--dir the sole positional argument is the revision.
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap storage objects that delegate to an inner revlog.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries in deterministic key order.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3281 3281
3282 3282
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # No SOURCE given: fall back to the repo's 'default' path, which
        # requires a repository to exist.
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Derive the address to connect to, defaulting the port per scheme;
    # anything other than https/ssh is rejected.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    # Imported lazily: the win32 module is only available/meaningful here.
    from . import win32

    # Validation is deliberately disabled (CERT_NONE): the point is to fetch
    # the peer certificate so its chain can be inspected, not to verify it.
    # NOTE(review): ssl.wrap_socket is deprecated in favor of
    # SSLContext.wrap_socket -- confirm before modernizing.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form requested via True).
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only attempt the (slow) build via
        # Windows Update when the chain is incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3352 3352
3353 3353
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state of a revision"""
    # Resolve the requested revision (working copy when rev is None) and
    # print each subrepo entry in stable, sorted path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3365 3365
3366 3366
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # successorssets() shares computation between calls through this cache.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each set is printed on its own indented line; an empty set
            # (pruned changeset) still produces a bare newline.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3421 3421
3422 3422
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # The command is optionalrepo, but resolving revisions needs one.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                # 'ui' is rejected -- presumably reserved as a template
                # resource name; empty keys are malformed.
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, plus the alias-expanded tree when
        # expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render a single time with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3486 3486
3487 3487
@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    if r is None:
        # ui.getpass() may return None (e.g. non-interactive sessions);
        # %-formatting None into bytes would crash, so use a marker value.
        r = b"<default response>"
    # Fix: the message previously read 'respose', inconsistent with
    # debuguiprompt's 'response'.
    ui.writenoi18n(b'response: %s\n' % r)
3498 3498
3499 3499
@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo the user's answer so tests can assert on prompt handling.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3510 3510
3511 3511
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both wlock and lock so any cache can be rebuilt safely;
    # full=True presumably requests a complete (not incremental) warm-up --
    # see localrepo.updatecaches for the exact semantics.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
3517 3517
3518 3518
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # All the heavy lifting lives in the upgrade module; **opts forwards the
    # per-revlog selection flags (--changelog / --manifest).
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )
3565 3565
3566 3566
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Translate path separators only when the user asked for forward slashes
    # and the platform separator differs; otherwise pass paths through.
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = util.normpath
    # Size each column to the longest repo-relative and cwd-relative path so
    # the rows line up. (Renamed loop variable: 'abs' shadowed the builtin.)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            f(repo.pathto(fname)),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
3593 3593
3594 3594
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node as "<hex> (<phase>)", joined by
            # spaces, with a trailing space so it concatenates cleanly
            # into the message below.
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3612 3612
3613 3613
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command-specific values are sent.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Keep only explicitly-set options as wire arguments.
    args = {k: v for k, v in pycompat.iteritems(opts) if v}
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    first = repo.debugwireargs(*vals, **args)
    second = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
3641 3641
3642 3642
3643 3643 def _parsewirelangblocks(fh):
3644 3644 activeaction = None
3645 3645 blocklines = []
3646 3646 lastindent = 0
3647 3647
3648 3648 for line in fh:
3649 3649 line = line.rstrip()
3650 3650 if not line:
3651 3651 continue
3652 3652
3653 3653 if line.startswith(b'#'):
3654 3654 continue
3655 3655
3656 3656 if not line.startswith(b' '):
3657 3657 # New block. Flush previous one.
3658 3658 if activeaction:
3659 3659 yield activeaction, blocklines
3660 3660
3661 3661 activeaction = line
3662 3662 blocklines = []
3663 3663 lastindent = 0
3664 3664 continue
3665 3665
3666 3666 # Else we start with an indent.
3667 3667
3668 3668 if not activeaction:
3669 3669 raise error.Abort(_(b'indented line outside of block'))
3670 3670
3671 3671 indent = len(line) - len(line.lstrip())
3672 3672
3673 3673 # If this line is indented more than the last line, concatenate it.
3674 3674 if indent > lastindent and blocklines:
3675 3675 blocklines[-1] += line.lstrip()
3676 3676 else:
3677 3677 blocklines.append(line)
3678 3678 lastindent = indent
3679 3679
3680 3680 # Flush last block.
3681 3681 if activeaction:
3682 3682 yield activeaction, blocklines
3683 3683
3684 3684
3685 3685 @command(
3686 3686 b'debugwireproto',
3687 3687 [
3688 3688 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3689 3689 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3690 3690 (
3691 3691 b'',
3692 3692 b'noreadstderr',
3693 3693 False,
3694 3694 _(b'do not read from stderr of the remote'),
3695 3695 ),
3696 3696 (
3697 3697 b'',
3698 3698 b'nologhandshake',
3699 3699 False,
3700 3700 _(b'do not log I/O related to the peer handshake'),
3701 3701 ),
3702 3702 ]
3703 3703 + cmdutil.remoteopts,
3704 3704 _(b'[PATH]'),
3705 3705 optionalrepo=True,
3706 3706 )
3707 3707 def debugwireproto(ui, repo, path=None, **opts):
3708 3708 """send wire protocol commands to a server
3709 3709
3710 3710 This command can be used to issue wire protocol commands to remote
3711 3711 peers and to debug the raw data being exchanged.
3712 3712
3713 3713 ``--localssh`` will start an SSH server against the current repository
3714 3714 and connect to that. By default, the connection will perform a handshake
3715 3715 and establish an appropriate peer instance.
3716 3716
3717 3717 ``--peer`` can be used to bypass the handshake protocol and construct a
3718 3718 peer instance using the specified class type. Valid values are ``raw``,
3719 3719 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3720 3720 raw data payloads and don't support higher-level command actions.
3721 3721
3722 3722 ``--noreadstderr`` can be used to disable automatic reading from stderr
3723 3723 of the peer (for SSH connections only). Disabling automatic reading of
3724 3724 stderr is useful for making output more deterministic.
3725 3725
3726 3726 Commands are issued via a mini language which is specified via stdin.
3727 3727 The language consists of individual actions to perform. An action is
3728 3728 defined by a block. A block is defined as a line with no leading
3729 3729 space followed by 0 or more lines with leading space. Blocks are
3730 3730 effectively a high-level command with additional metadata.
3731 3731
3732 3732 Lines beginning with ``#`` are ignored.
3733 3733
3734 3734 The following sections denote available actions.
3735 3735
3736 3736 raw
3737 3737 ---
3738 3738
3739 3739 Send raw data to the server.
3740 3740
3741 3741 The block payload contains the raw data to send as one atomic send
3742 3742 operation. The data may not actually be delivered in a single system
3743 3743 call: it depends on the abilities of the transport being used.
3744 3744
3745 3745 Each line in the block is de-indented and concatenated. Then, that
3746 3746 value is evaluated as a Python b'' literal. This allows the use of
3747 3747 backslash escaping, etc.
3748 3748
3749 3749 raw+
3750 3750 ----
3751 3751
3752 3752 Behaves like ``raw`` except flushes output afterwards.
3753 3753
3754 3754 command <X>
3755 3755 -----------
3756 3756
3757 3757 Send a request to run a named command, whose name follows the ``command``
3758 3758 string.
3759 3759
3760 3760 Arguments to the command are defined as lines in this block. The format of
3761 3761 each line is ``<key> <value>``. e.g.::
3762 3762
3763 3763 command listkeys
3764 3764 namespace bookmarks
3765 3765
3766 3766 If the value begins with ``eval:``, it will be interpreted as a Python
3767 3767 literal expression. Otherwise values are interpreted as Python b'' literals.
3768 3768 This allows sending complex types and encoding special byte sequences via
3769 3769 backslash escaping.
3770 3770
3771 3771 The following arguments have special meaning:
3772 3772
3773 3773 ``PUSHFILE``
3774 3774 When defined, the *push* mechanism of the peer will be used instead
3775 3775 of the static request-response mechanism and the content of the
3776 3776 file specified in the value of this argument will be sent as the
3777 3777 command payload.
3778 3778
3779 3779 This can be used to submit a local bundle file to the remote.
3780 3780
3781 3781 batchbegin
3782 3782 ----------
3783 3783
3784 3784 Instruct the peer to begin a batched send.
3785 3785
3786 3786 All ``command`` blocks are queued for execution until the next
3787 3787 ``batchsubmit`` block.
3788 3788
3789 3789 batchsubmit
3790 3790 -----------
3791 3791
3792 3792 Submit previously queued ``command`` blocks as a batch request.
3793 3793
3794 3794 This action MUST be paired with a ``batchbegin`` action.
3795 3795
3796 3796 httprequest <method> <path>
3797 3797 ---------------------------
3798 3798
3799 3799 (HTTP peer only)
3800 3800
3801 3801 Send an HTTP request to the peer.
3802 3802
3803 3803 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3804 3804
3805 3805 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3806 3806 headers to add to the request. e.g. ``Accept: foo``.
3807 3807
3808 3808 The following arguments are special:
3809 3809
3810 3810 ``BODYFILE``
3811 3811 The content of the file defined as the value to this argument will be
3812 3812 transferred verbatim as the HTTP request body.
3813 3813
3814 3814 ``frame <type> <flags> <payload>``
3815 3815 Send a unified protocol frame as part of the request body.
3816 3816
3817 3817 All frames will be collected and sent as the body to the HTTP
3818 3818 request.
3819 3819
3820 3820 close
3821 3821 -----
3822 3822
3823 3823 Close the connection to the server.
3824 3824
3825 3825 flush
3826 3826 -----
3827 3827
3828 3828 Flush data written to the server.
3829 3829
3830 3830 readavailable
3831 3831 -------------
3832 3832
3833 3833 Close the write end of the connection and read all available data from
3834 3834 the server.
3835 3835
3836 3836 If the connection to the server encompasses multiple pipes, we poll both
3837 3837 pipes and read available data.
3838 3838
3839 3839 readline
3840 3840 --------
3841 3841
3842 3842 Read a line of output from the server. If there are multiple output
3843 3843 pipes, reads only the main pipe.
3844 3844
3845 3845 ereadline
3846 3846 ---------
3847 3847
3848 3848 Like ``readline``, but read from the stderr pipe, if available.
3849 3849
3850 3850 read <X>
3851 3851 --------
3852 3852
3853 3853 ``read()`` N bytes from the server's main output pipe.
3854 3854
3855 3855 eread <X>
3856 3856 ---------
3857 3857
3858 3858 ``read()`` N bytes from the server's stderr pipe, if available.
3859 3859
3860 3860 Specifying Unified Frame-Based Protocol Frames
3861 3861 ----------------------------------------------
3862 3862
3863 3863 It is possible to emit a *Unified Frame-Based Protocol* by using special
3864 3864 syntax.
3865 3865
3866 3866 A frame is composed as a type, flags, and payload. These can be parsed
3867 3867 from a string of the form:
3868 3868
3869 3869 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3870 3870
3871 3871 ``request-id`` and ``stream-id`` are integers defining the request and
3872 3872 stream identifiers.
3873 3873
3874 3874 ``type`` can be an integer value for the frame type or the string name
3875 3875 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3876 3876 ``command-name``.
3877 3877
3878 3878 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3879 3879 components. Each component (and there can be just one) can be an integer
3880 3880 or a flag name for stream flags or frame flags, respectively. Values are
3881 3881 resolved to integers and then bitwise OR'd together.
3882 3882
3883 3883 ``payload`` represents the raw frame payload. If it begins with
3884 3884 ``cbor:``, the following string is evaluated as Python code and the
3885 3885 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3886 3886 as a Python byte string literal.
3887 3887 """
3888 3888 opts = pycompat.byteskwargs(opts)
3889 3889
3890 3890 if opts[b'localssh'] and not repo:
3891 3891 raise error.Abort(_(b'--localssh requires a repository'))
3892 3892
3893 3893 if opts[b'peer'] and opts[b'peer'] not in (
3894 3894 b'raw',
3895 3895 b'http2',
3896 3896 b'ssh1',
3897 3897 b'ssh2',
3898 3898 ):
3899 3899 raise error.Abort(
3900 3900 _(b'invalid value for --peer'),
3901 3901 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
3902 3902 )
3903 3903
3904 3904 if path and opts[b'localssh']:
3905 3905 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3906 3906
3907 3907 if ui.interactive():
3908 3908 ui.write(_(b'(waiting for commands on stdin)\n'))
3909 3909
3910 3910 blocks = list(_parsewirelangblocks(ui.fin))
3911 3911
3912 3912 proc = None
3913 3913 stdin = None
3914 3914 stdout = None
3915 3915 stderr = None
3916 3916 opener = None
3917 3917
3918 3918 if opts[b'localssh']:
3919 3919 # We start the SSH server in its own process so there is process
3920 3920 # separation. This prevents a whole class of potential bugs around
3921 3921 # shared state from interfering with server operation.
3922 3922 args = procutil.hgcmd() + [
3923 3923 b'-R',
3924 3924 repo.root,
3925 3925 b'debugserve',
3926 3926 b'--sshstdio',
3927 3927 ]
3928 3928 proc = subprocess.Popen(
3929 3929 pycompat.rapply(procutil.tonativestr, args),
3930 3930 stdin=subprocess.PIPE,
3931 3931 stdout=subprocess.PIPE,
3932 3932 stderr=subprocess.PIPE,
3933 3933 bufsize=0,
3934 3934 )
3935 3935
3936 3936 stdin = proc.stdin
3937 3937 stdout = proc.stdout
3938 3938 stderr = proc.stderr
3939 3939
3940 3940 # We turn the pipes into observers so we can log I/O.
3941 3941 if ui.verbose or opts[b'peer'] == b'raw':
3942 3942 stdin = util.makeloggingfileobject(
3943 3943 ui, proc.stdin, b'i', logdata=True
3944 3944 )
3945 3945 stdout = util.makeloggingfileobject(
3946 3946 ui, proc.stdout, b'o', logdata=True
3947 3947 )
3948 3948 stderr = util.makeloggingfileobject(
3949 3949 ui, proc.stderr, b'e', logdata=True
3950 3950 )
3951 3951
3952 3952 # --localssh also implies the peer connection settings.
3953 3953
3954 3954 url = b'ssh://localserver'
3955 3955 autoreadstderr = not opts[b'noreadstderr']
3956 3956
3957 3957 if opts[b'peer'] == b'ssh1':
3958 3958 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3959 3959 peer = sshpeer.sshv1peer(
3960 3960 ui,
3961 3961 url,
3962 3962 proc,
3963 3963 stdin,
3964 3964 stdout,
3965 3965 stderr,
3966 3966 None,
3967 3967 autoreadstderr=autoreadstderr,
3968 3968 )
3969 3969 elif opts[b'peer'] == b'ssh2':
3970 3970 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3971 3971 peer = sshpeer.sshv2peer(
3972 3972 ui,
3973 3973 url,
3974 3974 proc,
3975 3975 stdin,
3976 3976 stdout,
3977 3977 stderr,
3978 3978 None,
3979 3979 autoreadstderr=autoreadstderr,
3980 3980 )
3981 3981 elif opts[b'peer'] == b'raw':
3982 3982 ui.write(_(b'using raw connection to peer\n'))
3983 3983 peer = None
3984 3984 else:
3985 3985 ui.write(_(b'creating ssh peer from handshake results\n'))
3986 3986 peer = sshpeer.makepeer(
3987 3987 ui,
3988 3988 url,
3989 3989 proc,
3990 3990 stdin,
3991 3991 stdout,
3992 3992 stderr,
3993 3993 autoreadstderr=autoreadstderr,
3994 3994 )
3995 3995
3996 3996 elif path:
3997 3997 # We bypass hg.peer() so we can proxy the sockets.
3998 3998 # TODO consider not doing this because we skip
3999 3999 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4000 4000 u = util.url(path)
4001 4001 if u.scheme != b'http':
4002 4002 raise error.Abort(_(b'only http:// paths are currently supported'))
4003 4003
4004 4004 url, authinfo = u.authinfo()
4005 4005 openerargs = {
4006 4006 'useragent': b'Mercurial debugwireproto',
4007 4007 }
4008 4008
4009 4009 # Turn pipes/sockets into observers so we can log I/O.
4010 4010 if ui.verbose:
4011 4011 openerargs.update(
4012 4012 {
4013 4013 'loggingfh': ui,
4014 4014 'loggingname': b's',
4015 4015 'loggingopts': {'logdata': True, 'logdataapis': False,},
4016 4016 }
4017 4017 )
4018 4018
4019 4019 if ui.debugflag:
4020 4020 openerargs['loggingopts']['logdataapis'] = True
4021 4021
4022 4022 # Don't send default headers when in raw mode. This allows us to
4023 4023 # bypass most of the behavior of our URL handling code so we can
4024 4024 # have near complete control over what's sent on the wire.
4025 4025 if opts[b'peer'] == b'raw':
4026 4026 openerargs['sendaccept'] = False
4027 4027
4028 4028 opener = urlmod.opener(ui, authinfo, **openerargs)
4029 4029
4030 4030 if opts[b'peer'] == b'http2':
4031 4031 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4032 4032 # We go through makepeer() because we need an API descriptor for
4033 4033 # the peer instance to be useful.
4034 4034 with ui.configoverride(
4035 4035 {(b'experimental', b'httppeer.advertise-v2'): True}
4036 4036 ):
4037 4037 if opts[b'nologhandshake']:
4038 4038 ui.pushbuffer()
4039 4039
4040 4040 peer = httppeer.makepeer(ui, path, opener=opener)
4041 4041
4042 4042 if opts[b'nologhandshake']:
4043 4043 ui.popbuffer()
4044 4044
4045 4045 if not isinstance(peer, httppeer.httpv2peer):
4046 4046 raise error.Abort(
4047 4047 _(
4048 4048 b'could not instantiate HTTP peer for '
4049 4049 b'wire protocol version 2'
4050 4050 ),
4051 4051 hint=_(
4052 4052 b'the server may not have the feature '
4053 4053 b'enabled or is not allowing this '
4054 4054 b'client version'
4055 4055 ),
4056 4056 )
4057 4057
4058 4058 elif opts[b'peer'] == b'raw':
4059 4059 ui.write(_(b'using raw connection to peer\n'))
4060 4060 peer = None
4061 4061 elif opts[b'peer']:
4062 4062 raise error.Abort(
4063 4063 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4064 4064 )
4065 4065 else:
4066 4066 peer = httppeer.makepeer(ui, path, opener=opener)
4067 4067
4068 4068 # We /could/ populate stdin/stdout with sock.makefile()...
4069 4069 else:
4070 4070 raise error.Abort(_(b'unsupported connection configuration'))
4071 4071
4072 4072 batchedcommands = None
4073 4073
4074 4074 # Now perform actions based on the parsed wire language instructions.
4075 4075 for action, lines in blocks:
4076 4076 if action in (b'raw', b'raw+'):
4077 4077 if not stdin:
4078 4078 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4079 4079
4080 4080 # Concatenate the data together.
4081 4081 data = b''.join(l.lstrip() for l in lines)
4082 4082 data = stringutil.unescapestr(data)
4083 4083 stdin.write(data)
4084 4084
4085 4085 if action == b'raw+':
4086 4086 stdin.flush()
4087 4087 elif action == b'flush':
4088 4088 if not stdin:
4089 4089 raise error.Abort(_(b'cannot call flush on this peer'))
4090 4090 stdin.flush()
4091 4091 elif action.startswith(b'command'):
4092 4092 if not peer:
4093 4093 raise error.Abort(
4094 4094 _(
4095 4095 b'cannot send commands unless peer instance '
4096 4096 b'is available'
4097 4097 )
4098 4098 )
4099 4099
4100 4100 command = action.split(b' ', 1)[1]
4101 4101
4102 4102 args = {}
4103 4103 for line in lines:
4104 4104 # We need to allow empty values.
4105 4105 fields = line.lstrip().split(b' ', 1)
4106 4106 if len(fields) == 1:
4107 4107 key = fields[0]
4108 4108 value = b''
4109 4109 else:
4110 4110 key, value = fields
4111 4111
4112 4112 if value.startswith(b'eval:'):
4113 4113 value = stringutil.evalpythonliteral(value[5:])
4114 4114 else:
4115 4115 value = stringutil.unescapestr(value)
4116 4116
4117 4117 args[key] = value
4118 4118
4119 4119 if batchedcommands is not None:
4120 4120 batchedcommands.append((command, args))
4121 4121 continue
4122 4122
4123 4123 ui.status(_(b'sending %s command\n') % command)
4124 4124
4125 4125 if b'PUSHFILE' in args:
4126 4126 with open(args[b'PUSHFILE'], 'rb') as fh:
4127 4127 del args[b'PUSHFILE']
4128 4128 res, output = peer._callpush(
4129 4129 command, fh, **pycompat.strkwargs(args)
4130 4130 )
4131 4131 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4132 4132 ui.status(
4133 4133 _(b'remote output: %s\n') % stringutil.escapestr(output)
4134 4134 )
4135 4135 else:
4136 4136 with peer.commandexecutor() as e:
4137 4137 res = e.callcommand(command, args).result()
4138 4138
4139 4139 if isinstance(res, wireprotov2peer.commandresponse):
4140 4140 val = res.objects()
4141 4141 ui.status(
4142 4142 _(b'response: %s\n')
4143 4143 % stringutil.pprint(val, bprefix=True, indent=2)
4144 4144 )
4145 4145 else:
4146 4146 ui.status(
4147 4147 _(b'response: %s\n')
4148 4148 % stringutil.pprint(res, bprefix=True, indent=2)
4149 4149 )
4150 4150
4151 4151 elif action == b'batchbegin':
4152 4152 if batchedcommands is not None:
4153 4153 raise error.Abort(_(b'nested batchbegin not allowed'))
4154 4154
4155 4155 batchedcommands = []
4156 4156 elif action == b'batchsubmit':
4157 4157 # There is a batching API we could go through. But it would be
4158 4158 # difficult to normalize requests into function calls. It is easier
4159 4159 # to bypass this layer and normalize to commands + args.
4160 4160 ui.status(
4161 4161 _(b'sending batch with %d sub-commands\n')
4162 4162 % len(batchedcommands)
4163 4163 )
4164 4164 assert peer is not None
4165 4165 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4166 4166 ui.status(
4167 4167 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4168 4168 )
4169 4169
4170 4170 batchedcommands = None
4171 4171
4172 4172 elif action.startswith(b'httprequest '):
4173 4173 if not opener:
4174 4174 raise error.Abort(
4175 4175 _(b'cannot use httprequest without an HTTP peer')
4176 4176 )
4177 4177
4178 4178 request = action.split(b' ', 2)
4179 4179 if len(request) != 3:
4180 4180 raise error.Abort(
4181 4181 _(
4182 4182 b'invalid httprequest: expected format is '
4183 4183 b'"httprequest <method> <path>'
4184 4184 )
4185 4185 )
4186 4186
4187 4187 method, httppath = request[1:]
4188 4188 headers = {}
4189 4189 body = None
4190 4190 frames = []
4191 4191 for line in lines:
4192 4192 line = line.lstrip()
4193 4193 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4194 4194 if m:
4195 4195 # Headers need to use native strings.
4196 4196 key = pycompat.strurl(m.group(1))
4197 4197 value = pycompat.strurl(m.group(2))
4198 4198 headers[key] = value
4199 4199 continue
4200 4200
4201 4201 if line.startswith(b'BODYFILE '):
4202 4202 with open(line.split(b' ', 1), b'rb') as fh:
4203 4203 body = fh.read()
4204 4204 elif line.startswith(b'frame '):
4205 4205 frame = wireprotoframing.makeframefromhumanstring(
4206 4206 line[len(b'frame ') :]
4207 4207 )
4208 4208
4209 4209 frames.append(frame)
4210 4210 else:
4211 4211 raise error.Abort(
4212 4212 _(b'unknown argument to httprequest: %s') % line
4213 4213 )
4214 4214
4215 4215 url = path + httppath
4216 4216
4217 4217 if frames:
4218 4218 body = b''.join(bytes(f) for f in frames)
4219 4219
4220 4220 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4221 4221
4222 4222 # urllib.Request insists on using has_data() as a proxy for
4223 4223 # determining the request method. Override that to use our
4224 4224 # explicitly requested method.
4225 4225 req.get_method = lambda: pycompat.sysstr(method)
4226 4226
4227 4227 try:
4228 4228 res = opener.open(req)
4229 4229 body = res.read()
4230 4230 except util.urlerr.urlerror as e:
4231 4231 # read() method must be called, but only exists in Python 2
4232 4232 getattr(e, 'read', lambda: None)()
4233 4233 continue
4234 4234
4235 4235 ct = res.headers.get('Content-Type')
4236 4236 if ct == 'application/mercurial-cbor':
4237 4237 ui.write(
4238 4238 _(b'cbor> %s\n')
4239 4239 % stringutil.pprint(
4240 4240 cborutil.decodeall(body), bprefix=True, indent=2
4241 4241 )
4242 4242 )
4243 4243
4244 4244 elif action == b'close':
4245 4245 assert peer is not None
4246 4246 peer.close()
4247 4247 elif action == b'readavailable':
4248 4248 if not stdout or not stderr:
4249 4249 raise error.Abort(
4250 4250 _(b'readavailable not available on this peer')
4251 4251 )
4252 4252
4253 4253 stdin.close()
4254 4254 stdout.read()
4255 4255 stderr.read()
4256 4256
4257 4257 elif action == b'readline':
4258 4258 if not stdout:
4259 4259 raise error.Abort(_(b'readline not available on this peer'))
4260 4260 stdout.readline()
4261 4261 elif action == b'ereadline':
4262 4262 if not stderr:
4263 4263 raise error.Abort(_(b'ereadline not available on this peer'))
4264 4264 stderr.readline()
4265 4265 elif action.startswith(b'read '):
4266 4266 count = int(action.split(b' ', 1)[1])
4267 4267 if not stdout:
4268 4268 raise error.Abort(_(b'read not available on this peer'))
4269 4269 stdout.read(count)
4270 4270 elif action.startswith(b'eread '):
4271 4271 count = int(action.split(b' ', 1)[1])
4272 4272 if not stderr:
4273 4273 raise error.Abort(_(b'eread not available on this peer'))
4274 4274 stderr.read(count)
4275 4275 else:
4276 4276 raise error.Abort(_(b'unknown action: %s') % action)
4277 4277
4278 4278 if batchedcommands is not None:
4279 4279 raise error.Abort(_(b'unclosed "batchbegin" request'))
4280 4280
4281 4281 if peer:
4282 4282 peer.close()
4283 4283
4284 4284 if proc:
4285 4285 proc.kill()