##// END OF EJS Templates
debugssl: use `get_unique_pull_path`...
marmoute -
r47722:69359c91 default
parent child Browse files
Show More
@@ -1,4819 +1,4821
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullid,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 wireprotov2peer,
94 94 )
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 deltas as deltautil,
106 106 nodemap,
107 107 sidedata,
108 108 )
109 109
110 110 release = lockmod.release
111 111
112 112 table = {}
113 113 table.update(strip.command._table)
114 114 command = registrar.command(table)
115 115
116 116
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it directly, no repo required
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
136 136
137 137
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Fix: the vfs layer works on bytes paths (every other vfs call in this
    # file passes a bytes literal); a native str here fails to join with the
    # bytes vfs base on Python 3.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
153 153
154 154
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Fix: close the bundle file deterministically (also on error), the same
    # way debugbundle manages its file handle; the original leaked the handle.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
161 161
162 162
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass; the text is parsed a second time below to actually
    # build the commits — this pass only sizes the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    # local tag lines ("<hex-node> <name>\n") collected during the build and
    # written to .hg/localtags at the end
    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: id of the most recently committed node (-1 before the first)
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # node event: data is (id, parent-id list)
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # very first node: start from the pre-built lines
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the second parent's "nf*" files so the
                        # merge does not delete them
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file data from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # map backref ids to the node ids committed so far
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # label event: attach a local tag to node `id`
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # annotation event: switch the named branch for later nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
338 338
339 339
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup ``gen`` to the ui

    With ``all`` set, delta details for the changelog, manifest and every
    filelog chunk are printed; otherwise only changelog node ids are
    listed.  ``indent`` prefixes each output line (used when nested inside
    bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one line per delta in the current chunk group
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter() with a {} sentinel: keep reading filelog headers until the
        # empty header that terminates the filelog section
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
379 379
380 380
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        # render markers in sorted (stable) order
        for rawmarker in sorted(markers):
            mark = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, mark)
        fm.end()
403 403
404 404
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data', one per line"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
413 413
414 414
def _quasirepr(thing):
    """repr-like bytes rendering with deterministic (sorted) dict output"""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
421 421
422 422
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested via
    # --part-type (empty list means show everything)
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a nested, indented dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
445 445
446 446
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, never dump the contents
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if not isinstance(gen, bundle2.unbundle20):
            # plain changegroup bundle (v1)
            _debugchangegroup(ui, gen, all=all, **opts)
            return
        return _debugbundle2(ui, gen, all=all, **opts)
469 469
470 470
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
490 490
491 491
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)

    if opts['compute']:
        # recompute from the changeset instead of trusting stored sidedata
        files = metadata.compute_all_files_changes(ctx)
    else:
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for fname in sorted(files.touched):
        # category checks in priority order; "touched" is the fallback
        if fname in files.added:
            action = b"added"
        elif fname in files.removed:
            action = b"removed"
        elif fname in files.merged:
            action = b"merged"
        elif fname in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if fname in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[fname]
        elif fname in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[fname]

        ui.write(template % (action, copy_parent, fname, copy_source))
541 541
542 542
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # pass 1: every tracked file must be consistent with the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # pass 2: every manifest file must be tracked with an expected state
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            # fix: terminate the message with a newline like every other
            # warning this command emits
            ui.warn(_(b"%s in manifest1, but listed as state %s\n") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
571 571
572 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
585 585
586 586
def _debugdisplaycolor(ui):
    """print every color/effect label known to the ui, one per line"""
    # work on a copy so the caller's style table is left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, val in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.') :]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))

    # sort labels containing '_' after the rest so '_background' entries
    # are grouped together
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603 603
604 604
def _debugdisplaystyle(ui):
    """print each configured style and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    styles = ui._styles
    if not styles:
        return
    # pad style names to the widest one so effect lists line up
    width = max(len(name) for name in styles)
    for label, effects in sorted(styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
618 618
619 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
641 641
642 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: dump that revlog's DAG
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) per node, plus ('l', (rev, label))
            # for any rev the user asked to have labeled
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # changelog entry field 5 is the extra dict; emit an
                    # 'a' (annotation) event whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
712 712
713 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wantsstore = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wantsstore:
        # with -c/-m/--dir the first positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729 729
730 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # show whether the parsed timestamp falls inside RANGE
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
749 749
750 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    # hoist attribute lookups out of the per-rev loop below
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        # (cf. the p1/p2 labels below)
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base may be any rev; classify it
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta a delta is always against the previous
            # rev, unless the rev is its own base (full snapshot)
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain base rev -> 1-based chain id, assigned in discovery order
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against zero-length revisions/chains
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: sum the block spans that would
            # actually be read to reconstruct this chain
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
931 931
932 932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        # the deprecated --nodates flag still wins when given explicitly
        nodates = True
    datesort = opts.get('datesort')

    # dirstate entries are tuples; the fields used below are
    # ent[0] state char, ent[1] mode, ent[2] size, ent[3] mtime
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            # -1 marks an unknown/unset mtime
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # flag bit recorded in the mode for symlinks (hence b'lnk')
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
976 976
977 977
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote`
    peer can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing
    with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size
      is adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (or configured) remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself, filtered down to the
        # `--remote-as-revs` subset, as the "remote" side
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # similarly restrict the local view to `--local-as-revs`
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1231 1231
1232 1232
# I/O chunk size (4 KiB) used by debugdownload when streaming a resource
_chunksize = 4 << 10
1234 1234
1235 1235
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched with the same proxy/authentication configuration
    Mercurial itself would use. It is written to the `--output` path when
    given, otherwise to the ui output stream.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in fixed-size chunks so large resources need not fit
        # entirely in memory
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()
        # also release the response handle (previously leaked)
        fh.close()
1258 1258
1259 1259
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions

    Lists each enabled extension sorted by name, with its on-disk location,
    whether it is bundled with Mercurial, the versions it was tested with,
    and its bug-reporting link (most details only with --verbose).
    '''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen build: modules live inside the executable itself
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate with compatibility status against this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1321 1321
1322 1322
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Runs the expression through the parse/analyze/optimize pipeline
    (optionally printing intermediate trees via --show-stage), then prints
    every candidate file name the resulting matcher accepts.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the staged transformation pipeline; names are valid --show-stage values
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # stage label is omitted in the legacy --verbose-only mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be applied to
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1418 1418
1419 1419
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width is governed by the longest variant name (or the header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values are printed verbatim; booleans as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels highlighting repo/config/default mismatches
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1490 1490
1491 1491
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(capability):
        # render a boolean capability probe as b'yes'/b'no'
        return b'yes' if capability else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway temp file; failures (e.g.
    # unwritable directory) leave the answer as '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1514 1514
1515 1515
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name onto the bundle type identifier
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1562 1562
1563 1563
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # test the file itself first, then walk up its parent
                # directories — a file is also ignored when any containing
                # directory matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1612 1612
1613 1613
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # derive the node-id column width from the first revision, if any
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    header = b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1653 1653
1654 1654
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # the first-parent edge is always emitted; the second only when it
        # is a real parent (i.e. not the null node)
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1673 1673
1674 1674
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index first so it is fully loaded before querying stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1684 1684
1685 1685
1686 1686 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1687 1687 def debuginstall(ui, **opts):
1688 1688 """test Mercurial installation
1689 1689
1690 1690 Returns 0 on success.
1691 1691 """
1692 1692 opts = pycompat.byteskwargs(opts)
1693 1693
1694 1694 problems = 0
1695 1695
1696 1696 fm = ui.formatter(b'debuginstall', opts)
1697 1697 fm.startitem()
1698 1698
1699 1699 # encoding might be unknown or wrong. don't translate these messages.
1700 1700 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1701 1701 err = None
1702 1702 try:
1703 1703 codecs.lookup(pycompat.sysstr(encoding.encoding))
1704 1704 except LookupError as inst:
1705 1705 err = stringutil.forcebytestr(inst)
1706 1706 problems += 1
1707 1707 fm.condwrite(
1708 1708 err,
1709 1709 b'encodingerror',
1710 1710 b" %s\n (check that your locale is properly set)\n",
1711 1711 err,
1712 1712 )
1713 1713
1714 1714 # Python
1715 1715 pythonlib = None
1716 1716 if util.safehasattr(os, '__file__'):
1717 1717 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1718 1718 elif getattr(sys, 'oxidized', False):
1719 1719 pythonlib = pycompat.sysexecutable
1720 1720
1721 1721 fm.write(
1722 1722 b'pythonexe',
1723 1723 _(b"checking Python executable (%s)\n"),
1724 1724 pycompat.sysexecutable or _(b"unknown"),
1725 1725 )
1726 1726 fm.write(
1727 1727 b'pythonimplementation',
1728 1728 _(b"checking Python implementation (%s)\n"),
1729 1729 pycompat.sysbytes(platform.python_implementation()),
1730 1730 )
1731 1731 fm.write(
1732 1732 b'pythonver',
1733 1733 _(b"checking Python version (%s)\n"),
1734 1734 (b"%d.%d.%d" % sys.version_info[:3]),
1735 1735 )
1736 1736 fm.write(
1737 1737 b'pythonlib',
1738 1738 _(b"checking Python lib (%s)...\n"),
1739 1739 pythonlib or _(b"unknown"),
1740 1740 )
1741 1741
1742 1742 try:
1743 1743 from . import rustext # pytype: disable=import-error
1744 1744
1745 1745 rustext.__doc__ # trigger lazy import
1746 1746 except ImportError:
1747 1747 rustext = None
1748 1748
1749 1749 security = set(sslutil.supportedprotocols)
1750 1750 if sslutil.hassni:
1751 1751 security.add(b'sni')
1752 1752
1753 1753 fm.write(
1754 1754 b'pythonsecurity',
1755 1755 _(b"checking Python security support (%s)\n"),
1756 1756 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1757 1757 )
1758 1758
1759 1759 # These are warnings, not errors. So don't increment problem count. This
1760 1760 # may change in the future.
1761 1761 if b'tls1.2' not in security:
1762 1762 fm.plain(
1763 1763 _(
1764 1764 b' TLS 1.2 not supported by Python install; '
1765 1765 b'network connections lack modern security\n'
1766 1766 )
1767 1767 )
1768 1768 if b'sni' not in security:
1769 1769 fm.plain(
1770 1770 _(
1771 1771 b' SNI not supported by Python install; may have '
1772 1772 b'connectivity issues with some servers\n'
1773 1773 )
1774 1774 )
1775 1775
1776 1776 fm.plain(
1777 1777 _(
1778 1778 b"checking Rust extensions (%s)\n"
1779 1779 % (b'missing' if rustext is None else b'installed')
1780 1780 ),
1781 1781 )
1782 1782
1783 1783 # TODO print CA cert info
1784 1784
1785 1785 # hg version
1786 1786 hgver = util.version()
1787 1787 fm.write(
1788 1788 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1789 1789 )
1790 1790 fm.write(
1791 1791 b'hgverextra',
1792 1792 _(b"checking Mercurial custom build (%s)\n"),
1793 1793 b'+'.join(hgver.split(b'+')[1:]),
1794 1794 )
1795 1795
1796 1796 # compiled modules
1797 1797 hgmodules = None
1798 1798 if util.safehasattr(sys.modules[__name__], '__file__'):
1799 1799 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1800 1800 elif getattr(sys, 'oxidized', False):
1801 1801 hgmodules = pycompat.sysexecutable
1802 1802
1803 1803 fm.write(
1804 1804 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1805 1805 )
1806 1806 fm.write(
1807 1807 b'hgmodules',
1808 1808 _(b"checking installed modules (%s)...\n"),
1809 1809 hgmodules or _(b"unknown"),
1810 1810 )
1811 1811
1812 1812 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1813 1813 rustext = rustandc # for now, that's the only case
1814 1814 cext = policy.policy in (b'c', b'allow') or rustandc
1815 1815 nopure = cext or rustext
1816 1816 if nopure:
1817 1817 err = None
1818 1818 try:
1819 1819 if cext:
1820 1820 from .cext import ( # pytype: disable=import-error
1821 1821 base85,
1822 1822 bdiff,
1823 1823 mpatch,
1824 1824 osutil,
1825 1825 )
1826 1826
1827 1827 # quiet pyflakes
1828 1828 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1829 1829 if rustext:
1830 1830 from .rustext import ( # pytype: disable=import-error
1831 1831 ancestor,
1832 1832 dirstate,
1833 1833 )
1834 1834
1835 1835 dir(ancestor), dir(dirstate) # quiet pyflakes
1836 1836 except Exception as inst:
1837 1837 err = stringutil.forcebytestr(inst)
1838 1838 problems += 1
1839 1839 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1840 1840
1841 1841 compengines = util.compengines._engines.values()
1842 1842 fm.write(
1843 1843 b'compengines',
1844 1844 _(b'checking registered compression engines (%s)\n'),
1845 1845 fm.formatlist(
1846 1846 sorted(e.name() for e in compengines),
1847 1847 name=b'compengine',
1848 1848 fmt=b'%s',
1849 1849 sep=b', ',
1850 1850 ),
1851 1851 )
1852 1852 fm.write(
1853 1853 b'compenginesavail',
1854 1854 _(b'checking available compression engines (%s)\n'),
1855 1855 fm.formatlist(
1856 1856 sorted(e.name() for e in compengines if e.available()),
1857 1857 name=b'compengine',
1858 1858 fmt=b'%s',
1859 1859 sep=b', ',
1860 1860 ),
1861 1861 )
1862 1862 wirecompengines = compression.compengines.supportedwireengines(
1863 1863 compression.SERVERROLE
1864 1864 )
1865 1865 fm.write(
1866 1866 b'compenginesserver',
1867 1867 _(
1868 1868 b'checking available compression engines '
1869 1869 b'for wire protocol (%s)\n'
1870 1870 ),
1871 1871 fm.formatlist(
1872 1872 [e.name() for e in wirecompengines if e.wireprotosupport()],
1873 1873 name=b'compengine',
1874 1874 fmt=b'%s',
1875 1875 sep=b', ',
1876 1876 ),
1877 1877 )
1878 1878 re2 = b'missing'
1879 1879 if util._re2:
1880 1880 re2 = b'available'
1881 1881 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1882 1882 fm.data(re2=bool(util._re2))
1883 1883
1884 1884 # templates
1885 1885 p = templater.templatedir()
1886 1886 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1887 1887 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1888 1888 if p:
1889 1889 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1890 1890 if m:
1891 1891 # template found, check if it is working
1892 1892 err = None
1893 1893 try:
1894 1894 templater.templater.frommapfile(m)
1895 1895 except Exception as inst:
1896 1896 err = stringutil.forcebytestr(inst)
1897 1897 p = None
1898 1898 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1899 1899 else:
1900 1900 p = None
1901 1901 fm.condwrite(
1902 1902 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1903 1903 )
1904 1904 fm.condwrite(
1905 1905 not m,
1906 1906 b'defaulttemplatenotfound',
1907 1907 _(b" template '%s' not found\n"),
1908 1908 b"default",
1909 1909 )
1910 1910 if not p:
1911 1911 problems += 1
1912 1912 fm.condwrite(
1913 1913 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1914 1914 )
1915 1915
1916 1916 # editor
1917 1917 editor = ui.geteditor()
1918 1918 editor = util.expandpath(editor)
1919 1919 editorbin = procutil.shellsplit(editor)[0]
1920 1920 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1921 1921 cmdpath = procutil.findexe(editorbin)
1922 1922 fm.condwrite(
1923 1923 not cmdpath and editor == b'vi',
1924 1924 b'vinotfound',
1925 1925 _(
1926 1926 b" No commit editor set and can't find %s in PATH\n"
1927 1927 b" (specify a commit editor in your configuration"
1928 1928 b" file)\n"
1929 1929 ),
1930 1930 not cmdpath and editor == b'vi' and editorbin,
1931 1931 )
1932 1932 fm.condwrite(
1933 1933 not cmdpath and editor != b'vi',
1934 1934 b'editornotfound',
1935 1935 _(
1936 1936 b" Can't find editor '%s' in PATH\n"
1937 1937 b" (specify a commit editor in your configuration"
1938 1938 b" file)\n"
1939 1939 ),
1940 1940 not cmdpath and editorbin,
1941 1941 )
1942 1942 if not cmdpath and editor != b'vi':
1943 1943 problems += 1
1944 1944
1945 1945 # check username
1946 1946 username = None
1947 1947 err = None
1948 1948 try:
1949 1949 username = ui.username()
1950 1950 except error.Abort as e:
1951 1951 err = e.message
1952 1952 problems += 1
1953 1953
1954 1954 fm.condwrite(
1955 1955 username, b'username', _(b"checking username (%s)\n"), username
1956 1956 )
1957 1957 fm.condwrite(
1958 1958 err,
1959 1959 b'usernameerror',
1960 1960 _(
1961 1961 b"checking username...\n %s\n"
1962 1962 b" (specify a username in your configuration file)\n"
1963 1963 ),
1964 1964 err,
1965 1965 )
1966 1966
1967 1967 for name, mod in extensions.extensions():
1968 1968 handler = getattr(mod, 'debuginstall', None)
1969 1969 if handler is not None:
1970 1970 problems += handler(ui, fm)
1971 1971
1972 1972 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1973 1973 if not problems:
1974 1974 fm.data(problems=problems)
1975 1975 fm.condwrite(
1976 1976 problems,
1977 1977 b'problems',
1978 1978 _(b"%d problems detected, please check your install!\n"),
1979 1979 problems,
1980 1980 )
1981 1981 fm.end()
1982 1982
1983 1983 return problems
1984 1984
1985 1985
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Ask the peer about every node at once; it answers with a boolean
    # per node, which we render as a compact 0/1 string.
    nodes = [bin(nodeid) for nodeid in ids]
    known = peer.known(nodes)
    ui.write(b"%s\n" % b"".join(b"1" if flag else b"0" for flag in known))
1999 1999
2000 2000
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)

    Simply delegates to :hg:`debugnamecomplete`; kept only so that old
    completion scripts invoking the historical command name keep working.
    """
    debugnamecomplete(ui, repo, *args)
2005 2005
2006 2006
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced freeing bypasses the lock machinery entirely and just deletes
    # the lock file -- hence "DANGEROUS" in the option help above.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # Acquire non-blocking (False) so an already-held lock aborts
        # immediately instead of waiting.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user answers the prompt
            # (or the process is interrupted); release happens in finally.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Report the state of one lock file; returns 1 if held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We briefly took the lock ourselves, so nobody else held it.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; show the host only when
                    # the lock was taken on another machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquisition and our stat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2118 2118
2119 2119
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog
        # manifest storage; abort cleanly when the active storage backend
        # does not expose one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # wlock because clearing also removes the persisted on-disk data.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No option given: dump the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2193 2193
2194 2194
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; users may override via -T.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two merge parents (local/other), with their labels when recorded.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields emitted depend on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2302 2302
2303 2303
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather candidates from every name namespace except branches, which
    # is handled separately below so that only open branches are offered.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2326 2326
2327 2327
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        cl = repo.unfiltered().changelog
        # Prefer the index's own serializer when the backend provides one.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2389 2389
2390 2390
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id; markers may reference changesets
        # that are not present in the local repository.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: delete markers by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: create a marker for the given precursor/successors.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent info can only come from a changeset we have.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    # Mode 3: list markers (optionally restricted to --rev).
    else:
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices must be computed over *all* markers, even though we
            # display only the ones relevant to --rev.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2540 2540
2541 2541
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Default to the working directory context when no revision is given.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst, src in copymap.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2554 2554
2555 2555
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # NOTE: this function was previously (mis)named `debugp1copies`, which
    # shadowed the real p1 variant at module level; the command itself is
    # registered as `debugp2copies` and its behavior is unchanged.

    opts = pycompat.byteskwargs(opts)
    # Default to the working directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2568 2568
2569 2569
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for *path*, keeping only
        # dirstate entries whose state letter is in *acceptable*.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs pointing outside the repository complete to nothing.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/', so normalize OS separators for
        # matching, and convert back when emitting results.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, complete only up to the next separator.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the options;
    # with no filter options, everything (b'nmar') is acceptable.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2638 2638
2639 2639
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print every copy/rename from rev1 to
    # rev2 restricted to the given file patterns (if any), sorted by
    # destination path.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    copymap = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2653 2653
2654 2654
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is unconditionally enabled here; the output is
    # only shown when --debug is in effect.
    logging_overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        yes, no = _(b'yes'), _(b'no')
        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if is_local else no))
        ui.write(_(b'pushable: %s\n') % (yes if pushable else no))
    finally:
        peer.close()
2678 2678
2679 2679
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool maps to the ui.forcemerge config knob, mirroring how the real
    # merge machinery consumes it.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is in effect, buffer (and discard) the
                # warnings _picktool may emit while matching patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2767 2767
2768 2768
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # No key given: dump every key/value pair in the namespace.
            listing = target.listkeys(namespace)
            for k, v in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Conditional update: set key to `new` only if it equals `old`.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
    finally:
        target.close()
2804 2804
2805 2805
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (parent vectors) of two revisions and print their
    # depth, delta, hamming distance, and relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Fallback so `rel` is always bound; previously an unhandled
        # comparison result would raise UnboundLocalError in the format
        # expression below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2832 2832
2833 2833
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything" for dirstate.rebuild below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate ...
            manifestonly = manifestfiles - dirstatefiles
            # ... plus dirstate-only files that are not marked as added.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2882 2882
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: repair.rebuildfncache() does the actual scan/rewrite.
    repair.rebuildfncache(ui, repo)
2887 2887
2888 2888
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or None/False
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not copysource:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = copysource
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
            )
2908 2908
2909 2909
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
2915 2915
2916 2916
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one raw index line per revision and return,
        # skipping all of the aggregate statistics below.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # no delta parent: the revision deltas against itself
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision stops being a head once it appears as a parent
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # decode the revlog version/flag word
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full text (or empty text): starts a new delta chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta: extends the base revision's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte identifies the compression engine of the chunk
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the "total" slots into averages for display
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # width a plain decimal column needs to fit `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # decimal column plus "(xx.xx%)" annotation
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total); 100% when total is zero/falsy
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # label printable compression markers as "0xNN (c)", others as hex
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3271 3271
3272 3272
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full-length hashes with --debug, short hashes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure one node to size the hash columns
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents on any lookup failure so the
                # dump keeps going on damaged revlogs
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3386 3386
3387 3387
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Transformation pipeline: each stage consumes the tree produced by
    # the previous stage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: `showalways` unconditionally, `showchanged`
    # only when the tree differs from what was last printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate the analyzed and optimized trees independently and
        # compare the resulting revision lists.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style listing of the differing revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3519 3519
3520 3520
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logiofd = opts[b'logiofd']
    logiofile = opts[b'logiofile']
    if logiofd and logiofile:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if logiofd:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(logiofd), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(logiofd), 'wb', 0)
    elif logiofile:
        logfh = open(logiofile, b'ab', 0)

    wireprotoserver.sshserver(ui, repo, logfh=logfh).serve_forever()
3569 3569
3570 3570
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only used if you are one of the few
    people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision, i.e. "no second parent"
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3598 3598
3599 3599
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the sole positional argument is the revision, not
    # a file; shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Identify this command (not the sibling `debugdata`, from
            # which this code was copied) in usage errors.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap storage objects down to the underlying revlog when possible
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3626 3626
3627 3627
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    # Resolve SOURCE through the repo's configured paths; the stale
    # `urlutil.parseurl(ui.expandpath(...))` call this replaced was diff
    # residue and must not be kept alongside this one. Any branch
    # component returned here is ignored.
    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled: we only want the
    # peer's certificate bytes to feed the Windows chain builder.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3697 3699
3698 3700
3699 3701 @command(
3700 3702 b"debugbackupbundle",
3701 3703 [
3702 3704 (
3703 3705 b"",
3704 3706 b"recover",
3705 3707 b"",
3706 3708 b"brings the specified changeset back into the repository",
3707 3709 )
3708 3710 ]
3709 3711 + cmdutil.logopts,
3710 3712 _(b"hg debugbackupbundle [--recover HASH]"),
3711 3713 )
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Gather every *.hg bundle under .hg/strip-backup, most recent first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # The "hg incoming"-style machinery used below expects these keys.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to --limit changesets from one bundle, honoring
        # --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = urlutil.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we don't have; warn
            # and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Temporarily silence the noisy bundle-repo setup output.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # First bundle containing the node wins; stop looking.
                        break
            else:
                # Listing mode: print a header with the backup's mtime,
                # then a compact per-changeset summary (full paths/messages
                # with --verbose).
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3834 3836
3835 3837
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the requested revision (working parent when rev is None) and
    # dump each subrepository entry recorded in its substate, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3847 3849
3848 3850
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Import locally: only needed for this interactive command.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
3864 3866
3865 3867
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so repeated work is amortized.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # One leading space, then space-separated short hashes.
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            # A pruned changeset yields an empty set -> bare newline.
            ui.write(b'\n')
3920 3922
3921 3923
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        fnode = cache.getfnode(node, computemissing=False)
        # None means no cache entry; a falsy non-None value is a corrupt
        # entry; otherwise show the .hgtags filenode (flagging ones the
        # filelog does not actually contain).
        if fnode is None:
            display = b'missing'
        elif not fnode:
            display = b'invalid'
        else:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (r, hex(node), display))
3940 3942
3941 3943
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for spec in opts['define']:
        try:
            # A missing '=' makes split() return one element and the
            # unpacking raise ValueError, caught below.
            key, value = [e.strip() for e in spec.split(b'=', 1)]
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree if different.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def showsymbols(t):
        # Print the keywords and functions the template references.
        kwds, funcs = t.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render per changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4005 4007
4006 4008
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # getpass() may yield None; substitute a marker so the response line
    # is always printable.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4021 4023
4022 4024
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery returned, verbatim.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4035 4037
4036 4038
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while rebuilding caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
4042 4044
4043 4045
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate repeated -o flags before delegating to the upgrade module.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4093 4095
4094 4096
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    # Fixes vs. previous version (behavior unchanged):
    # - local `abs` shadowed the builtin abs(); renamed to `fname`
    # - lambda assigned to a name (PEP 8 E731) replaced by local defs
    # - list comprehensions inside max() replaced by generator expressions
    # - dated `cond and a or b` idiom replaced by a conditional expression
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Optionally normalize path separators for display (ui.slash config on
    # platforms where os.sep is not '/').
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':

        def fmtpath(fn):
            return util.normpath(fn)

    else:

        def fmtpath(fn):
            return fn

    # Column widths are sized to the longest repo-absolute and relative path.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            fmtpath(repo.pathto(fname)),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4121 4123
4122 4124
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render divergent nodes (if any) as "hex (phase)" pairs with a
        # trailing space so the output line stays well-formed either way.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = b' '.join(
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            )
            dnodes = rendered + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4140 4142
4141 4143
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire protocol command against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only set values are forwarded.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4172 4174
4173 4175
def _parsewirelangblocks(fh):
    """Split the debugwireproto mini language read from ``fh`` into blocks.

    Yields ``(action, lines)`` pairs where ``action`` is an unindented
    header line and ``lines`` are its indented payload lines. Blank lines
    and ``#`` comments are skipped; a line indented deeper than its
    predecessor is folded into (concatenated onto) that predecessor.
    """
    action = None
    body = []
    previndent = 0

    for raw in fh:
        line = raw.rstrip()

        # Blank lines and comments are ignored anywhere.
        if not line or line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # Unindented line starts a new block; emit the finished one.
            if action:
                yield action, body
            action = line
            body = []
            previndent = 0
            continue

        # From here on the line is indented, so it must belong to a block.
        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())
        if indent > previndent and body:
            # Deeper indent continues the previous logical line.
            body[-1] += line.lstrip()
        else:
            body.append(line)
            previndent = indent

    # Emit the trailing block, if any.
    if action:
        yield action, body
4214 4216
4215 4217
4216 4218 @command(
4217 4219 b'debugwireproto',
4218 4220 [
4219 4221 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4220 4222 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4221 4223 (
4222 4224 b'',
4223 4225 b'noreadstderr',
4224 4226 False,
4225 4227 _(b'do not read from stderr of the remote'),
4226 4228 ),
4227 4229 (
4228 4230 b'',
4229 4231 b'nologhandshake',
4230 4232 False,
4231 4233 _(b'do not log I/O related to the peer handshake'),
4232 4234 ),
4233 4235 ]
4234 4236 + cmdutil.remoteopts,
4235 4237 _(b'[PATH]'),
4236 4238 optionalrepo=True,
4237 4239 )
4238 4240 def debugwireproto(ui, repo, path=None, **opts):
4239 4241 """send wire protocol commands to a server
4240 4242
4241 4243 This command can be used to issue wire protocol commands to remote
4242 4244 peers and to debug the raw data being exchanged.
4243 4245
4244 4246 ``--localssh`` will start an SSH server against the current repository
4245 4247 and connect to that. By default, the connection will perform a handshake
4246 4248 and establish an appropriate peer instance.
4247 4249
4248 4250 ``--peer`` can be used to bypass the handshake protocol and construct a
4249 4251 peer instance using the specified class type. Valid values are ``raw``,
4250 4252 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4251 4253 raw data payloads and don't support higher-level command actions.
4252 4254
4253 4255 ``--noreadstderr`` can be used to disable automatic reading from stderr
4254 4256 of the peer (for SSH connections only). Disabling automatic reading of
4255 4257 stderr is useful for making output more deterministic.
4256 4258
4257 4259 Commands are issued via a mini language which is specified via stdin.
4258 4260 The language consists of individual actions to perform. An action is
4259 4261 defined by a block. A block is defined as a line with no leading
4260 4262 space followed by 0 or more lines with leading space. Blocks are
4261 4263 effectively a high-level command with additional metadata.
4262 4264
4263 4265 Lines beginning with ``#`` are ignored.
4264 4266
4265 4267 The following sections denote available actions.
4266 4268
4267 4269 raw
4268 4270 ---
4269 4271
4270 4272 Send raw data to the server.
4271 4273
4272 4274 The block payload contains the raw data to send as one atomic send
4273 4275 operation. The data may not actually be delivered in a single system
4274 4276 call: it depends on the abilities of the transport being used.
4275 4277
4276 4278 Each line in the block is de-indented and concatenated. Then, that
4277 4279 value is evaluated as a Python b'' literal. This allows the use of
4278 4280 backslash escaping, etc.
4279 4281
4280 4282 raw+
4281 4283 ----
4282 4284
4283 4285 Behaves like ``raw`` except flushes output afterwards.
4284 4286
4285 4287 command <X>
4286 4288 -----------
4287 4289
4288 4290 Send a request to run a named command, whose name follows the ``command``
4289 4291 string.
4290 4292
4291 4293 Arguments to the command are defined as lines in this block. The format of
4292 4294 each line is ``<key> <value>``. e.g.::
4293 4295
4294 4296 command listkeys
4295 4297 namespace bookmarks
4296 4298
4297 4299 If the value begins with ``eval:``, it will be interpreted as a Python
4298 4300 literal expression. Otherwise values are interpreted as Python b'' literals.
4299 4301 This allows sending complex types and encoding special byte sequences via
4300 4302 backslash escaping.
4301 4303
4302 4304 The following arguments have special meaning:
4303 4305
4304 4306 ``PUSHFILE``
4305 4307 When defined, the *push* mechanism of the peer will be used instead
4306 4308 of the static request-response mechanism and the content of the
4307 4309 file specified in the value of this argument will be sent as the
4308 4310 command payload.
4309 4311
4310 4312 This can be used to submit a local bundle file to the remote.
4311 4313
4312 4314 batchbegin
4313 4315 ----------
4314 4316
4315 4317 Instruct the peer to begin a batched send.
4316 4318
4317 4319 All ``command`` blocks are queued for execution until the next
4318 4320 ``batchsubmit`` block.
4319 4321
4320 4322 batchsubmit
4321 4323 -----------
4322 4324
4323 4325 Submit previously queued ``command`` blocks as a batch request.
4324 4326
4325 4327 This action MUST be paired with a ``batchbegin`` action.
4326 4328
4327 4329 httprequest <method> <path>
4328 4330 ---------------------------
4329 4331
4330 4332 (HTTP peer only)
4331 4333
4332 4334 Send an HTTP request to the peer.
4333 4335
4334 4336 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4335 4337
4336 4338 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4337 4339 headers to add to the request. e.g. ``Accept: foo``.
4338 4340
4339 4341 The following arguments are special:
4340 4342
4341 4343 ``BODYFILE``
4342 4344 The content of the file defined as the value to this argument will be
4343 4345 transferred verbatim as the HTTP request body.
4344 4346
4345 4347 ``frame <type> <flags> <payload>``
4346 4348 Send a unified protocol frame as part of the request body.
4347 4349
4348 4350 All frames will be collected and sent as the body to the HTTP
4349 4351 request.
4350 4352
4351 4353 close
4352 4354 -----
4353 4355
4354 4356 Close the connection to the server.
4355 4357
4356 4358 flush
4357 4359 -----
4358 4360
4359 4361 Flush data written to the server.
4360 4362
4361 4363 readavailable
4362 4364 -------------
4363 4365
4364 4366 Close the write end of the connection and read all available data from
4365 4367 the server.
4366 4368
4367 4369 If the connection to the server encompasses multiple pipes, we poll both
4368 4370 pipes and read available data.
4369 4371
4370 4372 readline
4371 4373 --------
4372 4374
4373 4375 Read a line of output from the server. If there are multiple output
4374 4376 pipes, reads only the main pipe.
4375 4377
4376 4378 ereadline
4377 4379 ---------
4378 4380
4379 4381 Like ``readline``, but read from the stderr pipe, if available.
4380 4382
4381 4383 read <X>
4382 4384 --------
4383 4385
4384 4386 ``read()`` N bytes from the server's main output pipe.
4385 4387
4386 4388 eread <X>
4387 4389 ---------
4388 4390
4389 4391 ``read()`` N bytes from the server's stderr pipe, if available.
4390 4392
4391 4393 Specifying Unified Frame-Based Protocol Frames
4392 4394 ----------------------------------------------
4393 4395
4394 4396 It is possible to emit a *Unified Frame-Based Protocol* by using special
4395 4397 syntax.
4396 4398
4397 4399 A frame is composed as a type, flags, and payload. These can be parsed
4398 4400 from a string of the form:
4399 4401
4400 4402 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4401 4403
4402 4404 ``request-id`` and ``stream-id`` are integers defining the request and
4403 4405 stream identifiers.
4404 4406
4405 4407 ``type`` can be an integer value for the frame type or the string name
4406 4408 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4407 4409 ``command-name``.
4408 4410
4409 4411 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4410 4412 components. Each component (and there can be just one) can be an integer
4411 4413 or a flag name for stream flags or frame flags, respectively. Values are
4412 4414 resolved to integers and then bitwise OR'd together.
4413 4415
4414 4416 ``payload`` represents the raw frame payload. If it begins with
4415 4417 ``cbor:``, the following string is evaluated as Python code and the
4416 4418 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4417 4419 as a Python byte string literal.
4418 4420 """
4419 4421 opts = pycompat.byteskwargs(opts)
4420 4422
4421 4423 if opts[b'localssh'] and not repo:
4422 4424 raise error.Abort(_(b'--localssh requires a repository'))
4423 4425
4424 4426 if opts[b'peer'] and opts[b'peer'] not in (
4425 4427 b'raw',
4426 4428 b'http2',
4427 4429 b'ssh1',
4428 4430 b'ssh2',
4429 4431 ):
4430 4432 raise error.Abort(
4431 4433 _(b'invalid value for --peer'),
4432 4434 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4433 4435 )
4434 4436
4435 4437 if path and opts[b'localssh']:
4436 4438 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4437 4439
4438 4440 if ui.interactive():
4439 4441 ui.write(_(b'(waiting for commands on stdin)\n'))
4440 4442
4441 4443 blocks = list(_parsewirelangblocks(ui.fin))
4442 4444
4443 4445 proc = None
4444 4446 stdin = None
4445 4447 stdout = None
4446 4448 stderr = None
4447 4449 opener = None
4448 4450
4449 4451 if opts[b'localssh']:
4450 4452 # We start the SSH server in its own process so there is process
4451 4453 # separation. This prevents a whole class of potential bugs around
4452 4454 # shared state from interfering with server operation.
4453 4455 args = procutil.hgcmd() + [
4454 4456 b'-R',
4455 4457 repo.root,
4456 4458 b'debugserve',
4457 4459 b'--sshstdio',
4458 4460 ]
4459 4461 proc = subprocess.Popen(
4460 4462 pycompat.rapply(procutil.tonativestr, args),
4461 4463 stdin=subprocess.PIPE,
4462 4464 stdout=subprocess.PIPE,
4463 4465 stderr=subprocess.PIPE,
4464 4466 bufsize=0,
4465 4467 )
4466 4468
4467 4469 stdin = proc.stdin
4468 4470 stdout = proc.stdout
4469 4471 stderr = proc.stderr
4470 4472
4471 4473 # We turn the pipes into observers so we can log I/O.
4472 4474 if ui.verbose or opts[b'peer'] == b'raw':
4473 4475 stdin = util.makeloggingfileobject(
4474 4476 ui, proc.stdin, b'i', logdata=True
4475 4477 )
4476 4478 stdout = util.makeloggingfileobject(
4477 4479 ui, proc.stdout, b'o', logdata=True
4478 4480 )
4479 4481 stderr = util.makeloggingfileobject(
4480 4482 ui, proc.stderr, b'e', logdata=True
4481 4483 )
4482 4484
4483 4485 # --localssh also implies the peer connection settings.
4484 4486
4485 4487 url = b'ssh://localserver'
4486 4488 autoreadstderr = not opts[b'noreadstderr']
4487 4489
4488 4490 if opts[b'peer'] == b'ssh1':
4489 4491 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4490 4492 peer = sshpeer.sshv1peer(
4491 4493 ui,
4492 4494 url,
4493 4495 proc,
4494 4496 stdin,
4495 4497 stdout,
4496 4498 stderr,
4497 4499 None,
4498 4500 autoreadstderr=autoreadstderr,
4499 4501 )
4500 4502 elif opts[b'peer'] == b'ssh2':
4501 4503 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4502 4504 peer = sshpeer.sshv2peer(
4503 4505 ui,
4504 4506 url,
4505 4507 proc,
4506 4508 stdin,
4507 4509 stdout,
4508 4510 stderr,
4509 4511 None,
4510 4512 autoreadstderr=autoreadstderr,
4511 4513 )
4512 4514 elif opts[b'peer'] == b'raw':
4513 4515 ui.write(_(b'using raw connection to peer\n'))
4514 4516 peer = None
4515 4517 else:
4516 4518 ui.write(_(b'creating ssh peer from handshake results\n'))
4517 4519 peer = sshpeer.makepeer(
4518 4520 ui,
4519 4521 url,
4520 4522 proc,
4521 4523 stdin,
4522 4524 stdout,
4523 4525 stderr,
4524 4526 autoreadstderr=autoreadstderr,
4525 4527 )
4526 4528
4527 4529 elif path:
4528 4530 # We bypass hg.peer() so we can proxy the sockets.
4529 4531 # TODO consider not doing this because we skip
4530 4532 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4531 4533 u = urlutil.url(path)
4532 4534 if u.scheme != b'http':
4533 4535 raise error.Abort(_(b'only http:// paths are currently supported'))
4534 4536
4535 4537 url, authinfo = u.authinfo()
4536 4538 openerargs = {
4537 4539 'useragent': b'Mercurial debugwireproto',
4538 4540 }
4539 4541
4540 4542 # Turn pipes/sockets into observers so we can log I/O.
4541 4543 if ui.verbose:
4542 4544 openerargs.update(
4543 4545 {
4544 4546 'loggingfh': ui,
4545 4547 'loggingname': b's',
4546 4548 'loggingopts': {
4547 4549 'logdata': True,
4548 4550 'logdataapis': False,
4549 4551 },
4550 4552 }
4551 4553 )
4552 4554
4553 4555 if ui.debugflag:
4554 4556 openerargs['loggingopts']['logdataapis'] = True
4555 4557
4556 4558 # Don't send default headers when in raw mode. This allows us to
4557 4559 # bypass most of the behavior of our URL handling code so we can
4558 4560 # have near complete control over what's sent on the wire.
4559 4561 if opts[b'peer'] == b'raw':
4560 4562 openerargs['sendaccept'] = False
4561 4563
4562 4564 opener = urlmod.opener(ui, authinfo, **openerargs)
4563 4565
4564 4566 if opts[b'peer'] == b'http2':
4565 4567 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4566 4568 # We go through makepeer() because we need an API descriptor for
4567 4569 # the peer instance to be useful.
4568 4570 with ui.configoverride(
4569 4571 {(b'experimental', b'httppeer.advertise-v2'): True}
4570 4572 ):
4571 4573 if opts[b'nologhandshake']:
4572 4574 ui.pushbuffer()
4573 4575
4574 4576 peer = httppeer.makepeer(ui, path, opener=opener)
4575 4577
4576 4578 if opts[b'nologhandshake']:
4577 4579 ui.popbuffer()
4578 4580
4579 4581 if not isinstance(peer, httppeer.httpv2peer):
4580 4582 raise error.Abort(
4581 4583 _(
4582 4584 b'could not instantiate HTTP peer for '
4583 4585 b'wire protocol version 2'
4584 4586 ),
4585 4587 hint=_(
4586 4588 b'the server may not have the feature '
4587 4589 b'enabled or is not allowing this '
4588 4590 b'client version'
4589 4591 ),
4590 4592 )
4591 4593
4592 4594 elif opts[b'peer'] == b'raw':
4593 4595 ui.write(_(b'using raw connection to peer\n'))
4594 4596 peer = None
4595 4597 elif opts[b'peer']:
4596 4598 raise error.Abort(
4597 4599 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4598 4600 )
4599 4601 else:
4600 4602 peer = httppeer.makepeer(ui, path, opener=opener)
4601 4603
4602 4604 # We /could/ populate stdin/stdout with sock.makefile()...
4603 4605 else:
4604 4606 raise error.Abort(_(b'unsupported connection configuration'))
4605 4607
4606 4608 batchedcommands = None
4607 4609
4608 4610 # Now perform actions based on the parsed wire language instructions.
4609 4611 for action, lines in blocks:
4610 4612 if action in (b'raw', b'raw+'):
4611 4613 if not stdin:
4612 4614 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4613 4615
4614 4616 # Concatenate the data together.
4615 4617 data = b''.join(l.lstrip() for l in lines)
4616 4618 data = stringutil.unescapestr(data)
4617 4619 stdin.write(data)
4618 4620
4619 4621 if action == b'raw+':
4620 4622 stdin.flush()
4621 4623 elif action == b'flush':
4622 4624 if not stdin:
4623 4625 raise error.Abort(_(b'cannot call flush on this peer'))
4624 4626 stdin.flush()
4625 4627 elif action.startswith(b'command'):
4626 4628 if not peer:
4627 4629 raise error.Abort(
4628 4630 _(
4629 4631 b'cannot send commands unless peer instance '
4630 4632 b'is available'
4631 4633 )
4632 4634 )
4633 4635
4634 4636 command = action.split(b' ', 1)[1]
4635 4637
4636 4638 args = {}
4637 4639 for line in lines:
4638 4640 # We need to allow empty values.
4639 4641 fields = line.lstrip().split(b' ', 1)
4640 4642 if len(fields) == 1:
4641 4643 key = fields[0]
4642 4644 value = b''
4643 4645 else:
4644 4646 key, value = fields
4645 4647
4646 4648 if value.startswith(b'eval:'):
4647 4649 value = stringutil.evalpythonliteral(value[5:])
4648 4650 else:
4649 4651 value = stringutil.unescapestr(value)
4650 4652
4651 4653 args[key] = value
4652 4654
4653 4655 if batchedcommands is not None:
4654 4656 batchedcommands.append((command, args))
4655 4657 continue
4656 4658
4657 4659 ui.status(_(b'sending %s command\n') % command)
4658 4660
4659 4661 if b'PUSHFILE' in args:
4660 4662 with open(args[b'PUSHFILE'], 'rb') as fh:
4661 4663 del args[b'PUSHFILE']
4662 4664 res, output = peer._callpush(
4663 4665 command, fh, **pycompat.strkwargs(args)
4664 4666 )
4665 4667 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4666 4668 ui.status(
4667 4669 _(b'remote output: %s\n') % stringutil.escapestr(output)
4668 4670 )
4669 4671 else:
4670 4672 with peer.commandexecutor() as e:
4671 4673 res = e.callcommand(command, args).result()
4672 4674
4673 4675 if isinstance(res, wireprotov2peer.commandresponse):
4674 4676 val = res.objects()
4675 4677 ui.status(
4676 4678 _(b'response: %s\n')
4677 4679 % stringutil.pprint(val, bprefix=True, indent=2)
4678 4680 )
4679 4681 else:
4680 4682 ui.status(
4681 4683 _(b'response: %s\n')
4682 4684 % stringutil.pprint(res, bprefix=True, indent=2)
4683 4685 )
4684 4686
4685 4687 elif action == b'batchbegin':
4686 4688 if batchedcommands is not None:
4687 4689 raise error.Abort(_(b'nested batchbegin not allowed'))
4688 4690
4689 4691 batchedcommands = []
4690 4692 elif action == b'batchsubmit':
4691 4693 # There is a batching API we could go through. But it would be
4692 4694 # difficult to normalize requests into function calls. It is easier
4693 4695 # to bypass this layer and normalize to commands + args.
4694 4696 ui.status(
4695 4697 _(b'sending batch with %d sub-commands\n')
4696 4698 % len(batchedcommands)
4697 4699 )
4698 4700 assert peer is not None
4699 4701 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4700 4702 ui.status(
4701 4703 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4702 4704 )
4703 4705
4704 4706 batchedcommands = None
4705 4707
4706 4708 elif action.startswith(b'httprequest '):
4707 4709 if not opener:
4708 4710 raise error.Abort(
4709 4711 _(b'cannot use httprequest without an HTTP peer')
4710 4712 )
4711 4713
4712 4714 request = action.split(b' ', 2)
4713 4715 if len(request) != 3:
4714 4716 raise error.Abort(
4715 4717 _(
4716 4718 b'invalid httprequest: expected format is '
4717 4719 b'"httprequest <method> <path>'
4718 4720 )
4719 4721 )
4720 4722
4721 4723 method, httppath = request[1:]
4722 4724 headers = {}
4723 4725 body = None
4724 4726 frames = []
4725 4727 for line in lines:
4726 4728 line = line.lstrip()
4727 4729 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4728 4730 if m:
4729 4731 # Headers need to use native strings.
4730 4732 key = pycompat.strurl(m.group(1))
4731 4733 value = pycompat.strurl(m.group(2))
4732 4734 headers[key] = value
4733 4735 continue
4734 4736
4735 4737 if line.startswith(b'BODYFILE '):
4736 4738 with open(line.split(b' ', 1), b'rb') as fh:
4737 4739 body = fh.read()
4738 4740 elif line.startswith(b'frame '):
4739 4741 frame = wireprotoframing.makeframefromhumanstring(
4740 4742 line[len(b'frame ') :]
4741 4743 )
4742 4744
4743 4745 frames.append(frame)
4744 4746 else:
4745 4747 raise error.Abort(
4746 4748 _(b'unknown argument to httprequest: %s') % line
4747 4749 )
4748 4750
4749 4751 url = path + httppath
4750 4752
4751 4753 if frames:
4752 4754 body = b''.join(bytes(f) for f in frames)
4753 4755
4754 4756 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4755 4757
4756 4758 # urllib.Request insists on using has_data() as a proxy for
4757 4759 # determining the request method. Override that to use our
4758 4760 # explicitly requested method.
4759 4761 req.get_method = lambda: pycompat.sysstr(method)
4760 4762
4761 4763 try:
4762 4764 res = opener.open(req)
4763 4765 body = res.read()
4764 4766 except util.urlerr.urlerror as e:
4765 4767 # read() method must be called, but only exists in Python 2
4766 4768 getattr(e, 'read', lambda: None)()
4767 4769 continue
4768 4770
4769 4771 ct = res.headers.get('Content-Type')
4770 4772 if ct == 'application/mercurial-cbor':
4771 4773 ui.write(
4772 4774 _(b'cbor> %s\n')
4773 4775 % stringutil.pprint(
4774 4776 cborutil.decodeall(body), bprefix=True, indent=2
4775 4777 )
4776 4778 )
4777 4779
4778 4780 elif action == b'close':
4779 4781 assert peer is not None
4780 4782 peer.close()
4781 4783 elif action == b'readavailable':
4782 4784 if not stdout or not stderr:
4783 4785 raise error.Abort(
4784 4786 _(b'readavailable not available on this peer')
4785 4787 )
4786 4788
4787 4789 stdin.close()
4788 4790 stdout.read()
4789 4791 stderr.read()
4790 4792
4791 4793 elif action == b'readline':
4792 4794 if not stdout:
4793 4795 raise error.Abort(_(b'readline not available on this peer'))
4794 4796 stdout.readline()
4795 4797 elif action == b'ereadline':
4796 4798 if not stderr:
4797 4799 raise error.Abort(_(b'ereadline not available on this peer'))
4798 4800 stderr.readline()
4799 4801 elif action.startswith(b'read '):
4800 4802 count = int(action.split(b' ', 1)[1])
4801 4803 if not stdout:
4802 4804 raise error.Abort(_(b'read not available on this peer'))
4803 4805 stdout.read(count)
4804 4806 elif action.startswith(b'eread '):
4805 4807 count = int(action.split(b' ', 1)[1])
4806 4808 if not stderr:
4807 4809 raise error.Abort(_(b'eread not available on this peer'))
4808 4810 stderr.read(count)
4809 4811 else:
4810 4812 raise error.Abort(_(b'unknown action: %s') % action)
4811 4813
4812 4814 if batchedcommands is not None:
4813 4815 raise error.Abort(_(b'unclosed "batchbegin" request'))
4814 4816
4815 4817 if peer:
4816 4818 peer.close()
4817 4819
4818 4820 if proc:
4819 4821 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now